From c13ff2daabf0b386e9c4c190c10a81a5af9074b1 Mon Sep 17 00:00:00 2001 From: AhmedBasem20 Date: Mon, 14 Aug 2023 23:54:06 +0000 Subject: [PATCH] =?UTF-8?q?Deploy=20preview=20for=20PR=20254=20?= =?UTF-8?q?=F0=9F=9B=AB?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pr-preview/pr-254/404.html | 4 + pr-preview/pr-254/assets/MARVEL-32e738c9.png | Bin 0 -> 17494 bytes pr-preview/pr-254/assets/MaX-099f261c.png | Bin 0 -> 26127 bytes pr-preview/pr-254/assets/index-2e560d45.js | 322 + pr-preview/pr-254/assets/index-ae9b867b.css | 1 + .../assets/logo-white-text-16948862.svg | 157 + pr-preview/pr-254/favicon.png | Bin 0 -> 5286 bytes pr-preview/pr-254/index.html | 24 + pr-preview/pr-254/plugins_metadata.json | 30114 ++++++++++++++++ pr-preview/pr-254/status-alpha-d6af23.svg | 1 + pr-preview/pr-254/status-beta-d6af23.svg | 1 + pr-preview/pr-254/status-inactive-bbbbbb.svg | 1 + pr-preview/pr-254/status-planning-d9644d.svg | 1 + pr-preview/pr-254/status-stable-4cc61e.svg | 1 + 14 files changed, 30627 insertions(+) create mode 100644 pr-preview/pr-254/404.html create mode 100644 pr-preview/pr-254/assets/MARVEL-32e738c9.png create mode 100644 pr-preview/pr-254/assets/MaX-099f261c.png create mode 100644 pr-preview/pr-254/assets/index-2e560d45.js create mode 100644 pr-preview/pr-254/assets/index-ae9b867b.css create mode 100644 pr-preview/pr-254/assets/logo-white-text-16948862.svg create mode 100644 pr-preview/pr-254/favicon.png create mode 100644 pr-preview/pr-254/index.html create mode 100644 pr-preview/pr-254/plugins_metadata.json create mode 100644 pr-preview/pr-254/status-alpha-d6af23.svg create mode 100644 pr-preview/pr-254/status-beta-d6af23.svg create mode 100644 pr-preview/pr-254/status-inactive-bbbbbb.svg create mode 100644 pr-preview/pr-254/status-planning-d9644d.svg create mode 100644 pr-preview/pr-254/status-stable-4cc61e.svg diff --git a/pr-preview/pr-254/404.html b/pr-preview/pr-254/404.html new file mode 100644 index 00000000..5d2bb0be --- /dev/null +++ b/pr-preview/pr-254/404.html @@ -0,0 +1,4 @@ + + diff --git a/pr-preview/pr-254/assets/MARVEL-32e738c9.png b/pr-preview/pr-254/assets/MARVEL-32e738c9.png new file mode 100644 index 0000000000000000000000000000000000000000..d17a6eda2932c8ca2c994730d9d1124de7aaa384 GIT binary patch literal 17494 zcmbqZRa6{Hv>g~2+}$05yF0->Kmr7p;O_43?iwsWu;4y81b2tQ-Gck$zOVQHR|e-1pop<0Dz~DP{1Jo;K~L792)`v0vP}Ro_AY%m?Nx=Jm zS3zf4@<#;0K~~%O<86!oH7JQs*&ZK3I2So3DY$J!cwA&Oq1P%t0N}HfoTRvh$I4l* zyN~XC=6!d!bD3<>F+=RGL>*K%u9qA+lmvM(I&=tiw!9Zb?%evgnY()0dI!BJP@5+; z0Xka@x>yy{>tdY;pY5CL7cx0c=gI3gM>D2ne4Qp59qQ|6pK596i(KzX{(F%CXGnUv)F)LIJ6jx8a7deY)1PohlfiJwKUlSs?;ybV4Z+&a$hWbJNt{JE0$UfVm^j$)(`zzW?C1!wFgynUBD z{R!?=E`cWnw4V#s`4lrXle7-I15?GU4ei{h2_&ONX93#;OJU)C$HE7Q_d23GxLFDT z;s!jtT{XzFbBoy>0;r&<2hX^9(tc;J`dHTEwOD0czQoaWNOt$M^m_}gaCm-vHP2IE zXq8qQE%*&g$EMKC#aqWYJ(Dz`sTu5ncKLVaoRY0AbGDMeTSU=tjn?G4r0F+PPV5p!1tL&wZ@;8(mg+Is2W2AYTYh%uWI=LL^c7;X;FlI4=l;l~zdn_$2L9#|Z z=ozBtsxTeye9HRtHp|#|H>41WL3m4N^aN_QS}t+AOsFx2Mo%aE-!`09t8c*YyvfV(v-0_K$q9Sj^b|F-=maVPefO~f7jK!>F z;cP+gwfBLUmi2)-d1GK3dpmHf+1?M4J1D-~WumLg2t7AxCOdmyVHw zk_~nY0#6-&T)ZaI-?yp2y));^5Ffg|r~&3HPN7RY$iKTdJvNnmS_d6IH-T;DFMktl zSNMiNL4XjhwGNLYfMyI-?VzrFyM;h60`kN&HG68CTze z=5zIak5B{r5ZAOk-rQVSRGQZ=pyrUH}-)7l@Vj(dgNjHBj+TR3#VB zfPBthj&|AA?c?xaIQb#5b9)ly!dG|t?#^t{jutH9$7b^F`5X@ve;5Nr;{;z(3UAD09Rhox_mS!Mo1s27N}4-p5ygZ%fIsYWW9H#6*>waH=YI`I4-6(~-GqF+gJ*O$ z^6*O?Vm$e^bL{mE-g@OElN=+~pZ}er`UKRaA5hfhk9cx|6`KLwB=?m&?E2x6X=(h) z8}B0_y}EyrVx*9XvKL%;3*lw5R_wn}c$!H@bY$hUh3*at7X 
z3lrrx!DUnF(=C&L-4U!9!6HG+UWYe}|Ny<!c5xPqfuWmsd8uq9;RC`**#mQ8nEX z*Wcq&b@2VR?Bk=J;E&K_d&F$s&j=1T=yMByZbkqH422~b(*iX=jNO-@vH6*rJU_NW z4OCZ@MgCaaV4|2j=G@-8e7keVqb=_jtG?E=v)tjGKr&!_x|$be zCDC95Gn9GuVo!#}--*@pK|X+qhy$v41#k`OZnoDVx3N)k!KE245>H@iV?#=zX&HXz z0DrI}VWR&O11bPtvUx*_4E>D0&jyJk1P~djfa#Z%zVL%@M}J3K~FE*y4x&cq}G;}C7qqN zC+>v)dO`)Bh{sOL?d4bx?vd5T$TewyyM_cq($XLxOYx5cSu5NeX@LqpKOcAY8_44$ zXdGP)EqNg%sGAuuyn8M61g!4q*FZ2~m|r@MUBc%gij`{^<(xT^w)mnOyD*OSI^SIY*a z+}i7;w$Cgs3L3dblY$8^d`~y>?4jS|FQGsFk))p3_M0QAczGuWvS}!fhzA$6O(5M= zL$MnLmDZT8IlN#q3f~O&gI83D1XhvG_OHtJ_R+93Mz)RK4BiFHB)@^dlPy7<>+c6@ z4YH^e?AEZ}1eb0>X&QdjD$oW)(-%#rjKt_PYzChY^$ZJZp3$KxMB-em6%?HBC4Flt z`1$}}5s&``&78YIe8`)DGm0S&gSlJW$>3A9UO~>P+OMfaRaHQ;{boD`Vx*j}0ynf{ zcAuWMRSp^(YTJBI?n-Q$5MZSr86d(jj^oyWNG6lT6?DrsSg2cD$h``BgRC7bGFX{S zLm=f&@IAswZ93Z4Kt4D&%yv#?tP&nJP(*^JLI^Lz1Xq&y zQr?=HmT?n}hMGWo780*iqZ*8t3jS5agFE(IDil4At@IP@MqtQ<#_vxwM<}V&VGx#cBuqswyQku8<>BVnL^vXBmv9a+0t5jfudFy6`sFaEw>Zn z&PYfaOtVKv<>}BD=sCgRO1c*@k?KQ(5leUJlRzMh>jFk8aVJdajV45XsxwTw`{sr$ zFI~=rG9hGZF{n$4je{E<|4wH2QPSGk*+_xj`TO@jsxcH!_CFMN2w*3MUS^ZQ#ln~H z+xtfrvMbsdm!Fy%T|;#xw$t&4F!wb9_Rw#Hkwp zj3K^M5DXE(j`nw74d7dwq4<5J`&8}B!nC+0{V8G3J4PM>SbM((dbY8u%GuJ|8_Fb^ zK++DO2j$O8oL=}9kwb%uVbs^2g*qjs+Rmcn1sk{FT6=W9e`-M(ht3-$M0YD94{* z7#wY=sQRTHf%bi5>93+ZSTJ^1fwqxqXQ-nHOD?#;AJ%U;V`woDv?+5g#;I~bE#u?r z=9VNU74RBTSMbVp`DNs6i?v8&T6^a$R>TSpwpRzd0Cqu zA_P%|0OXIIv1!il>I|A3?qRW||KXc9*0ptDJ`nzDwp&<{(-Y2KoQZ0tENQ*<2#*mA zRKeJfOs@N7A2M95f`t!(hP;BvM$NL3NYo7_E$uG~XjbRKui|A1BuwK1`rs?ho|W!A z-0ZMO)|2Vhe@aTS8(5^zy5oCKJOeNK9@IWqy4l@jW^GP#H{{+;z`^+FKnfzkC>h^= zqfveQ;%h88?gyI_9T6S+!|D74ECq9N;AeOcG%*st8$KS9gI~iE54|}jyQM!`taUfM zD>6lnz=iej$z&9ZAL5PaJ}4raOd>Thx29CK9FD$ zI-d&t%kq!gAK*uPGS%X;k2=)CK0oJK^j?>!%IN5j0ZS7LIR$znv?WOm)fNxlouY@L zsN&{Y2h8$?!_R2v8|-TAnX+AdTNMf&#kFRwFc*Ae)uSIRHlI21LsE}lvoazIZ zUd#>XvFSere%>tIp0XMEln=t`bPMC)A+}1(5uTlKK}cr|^C${nvuBQEMYqS)BwW9~ zX@W)34(5S%Fez^yIv=((2x6|2_xu~s6<6;6cVR!qGeoz4Z>{x%rC@={<$v8=+(7M| zx_<}fiIkypk$;w3TB(7A?@zttN%bn>QPFwh1bq-MZEJB6Gs)TMplnG<@X?9f(UTTZ zuf7*A0{L1$FY|>??|D3P;!g^K&+W4j* zJ18}1VS!0uLI?&z22X1EL)XrsPlHJro~M+WR|tvKc7260fER6I>kg%;>As8c>bTFe=};X7$815VbMIPjzIs-C8KRy`;#yT z^@}%j51|pi8SkJj=DW(FCW7HDwGQ#YL~gXF$IBn@^jgP}BpwKK&!Gl>!is=^!uq}y zB+D^eA2El%U!yeo?C`LlzJ`onSke$5EgcoXgVW4_w>~p@IV=J5n+g^*(qHlCa7=tH z{-lurvaM5nnDt}=(|_AwWu4(XZ1e3T3L2GPY#!J|ngSlk4`_X$W_7)Vv@W__u#f4C zH$p1u-v`*ku5#j0wc9|bUa$L6giP9G@H0v~;mv8vI`L(zQPk~iG1j!+z*7F_gs5l) zh-v5lKLijeM3iZdUeFk0x~TDaXb)NXeOq04svTTV(qu2F)$-Jg(~ z@^{)Biw@y*6c;>B=|B)R0UoL;CRjWM7Fq0*=i_HnEEcLiuWSKC;@v;SVlM#@rTxdr zg$1*f6#)fySnQ>xY^(6`Gre5BhNdP=JVajUk=-Hq$ZK3x`Bkbb8Ek@M>G{{C zeB1pdEjwA+UNpWRwTF4Epb)apB80!$w`MwxEzgoJfYl^NaG7`cAn={=QPvTzq(I?pJGs*k70T1k`t?sgL2o{Fz{lcHsVO`!RG zKwBY0VUoOfN4q`KS-*D>l|Yo39+kOLc|rLKU!phqCzXn<%Ns)wUV1=ZWXeC0Vsu3N zJQRZ8W@D7AO@>D19mmTgoJ@_4jngZ0{V{QGqE(X%{z(N3P3Td{os?e{bK~(h3bRuj z9w%9f8p{#J?0Cu)@R3%+Rju|AjT)4oi>5UXEuqhW9+WpEs}u-PkW3#{>tHrZ2S_U&a{wm(0n zVas+^K~*J^wI!4bEN*mNv7e^N6?|65ZKW|aWsz9Ky)ET$PSNkj&tU8-+TWjSlUqPM z6fw|?e9MPO)TBO939im>O}Z{{h9O^5Hm&rnIX5)I6JguXeP`+2>KaQBg1@WcHqf3$ zb@n+XO~l=y3o|KlS1*Nf>gWA6aLbG6-zkIq)rM5J4>>U~|YdOcvl} zD%ry^`>5s-!`fd=vE~T!19beCV<@fIzlSU|43G#{eMuXs{-qUFri^Y}Z{3*8(5($G zM#gciA9(~+QQG=NQxQ>-9O zDl%k&Djs9EF{BDkP8ur;!Yr+3bA$dV`=-MQ zjC=hKCfQDAelC2x99z#C5$;Ks5Gc4WI@}Vcsfix^6v?}h6%`fDJ|gk=?m$tl77ljR z#wIqCh)#QhrpoNj^#9mforIZMhuA~8lR~z)3}$E3g6VM*3ve1ot>X)pE6;p;ocBz$a%Tgp*L8z z^&FQlCEglLgA1i$Zv{W373_(<`CxbZBBqTFcRiy!27$=ywzhNMS^r-JHiivn(qpJXNqd9gmO9M6*QcG z==!)vPne2VeMuaWdNwwg_5zl5?99gX0}Xe9>mv*<`740<#0J z#TFH?!$4(Z80J{^Eq{~Q;MX&%|XvDBrpzMJ?>B{5q>WR94K1iG7yJLww)eZe|( 
zkTdFy67-5Zf28_O49a4YB+NlsPSqGr?DW?3nf)&H&$cGQ0>mBB%3P7BdHJByD1q_l@vB^KAg0qfAk(ALi41lcw_(tW__8TwoN zJ)U&ES$B!mCUAJ&5eZjc9$we9@4Zk_c;`(sl2l=6h|F1n1^OT%n(ew@_@w9gi$r5j zv^)1zTyg_JqzOLNN*IcyU`^_7$2VOirOZdeq9Sa;_F>WXsFt$(M@i)F3oPrCSWAk? zN^8!HglnGY1l`wLFoGG=rP&usr-p?^$K77f4yzhrK2!ZPZEgaXhw)w6SZXgB>Bn`d z(TC~v`|G(jQtC_y1w@p_KeUj-d=N5KO|@O2$eoB~c83NIP5dQnKB0*Ex0s%g8YU7G zD(ziGmKr`po7T9TDOaO{Rg|_|<0pm;`P=5r&GiMz$)^mQ0O#E`v~8cj#A#+t%{AJO zyguwIlnlJi?>)8tvJ*3Rs^Wt?dcc111zQ~3_T}j3_{HX$qbBB@@-Y94LmcA}Chop+F>b-Y>!X{vI`5Z*qUQ3CBz*0Vp{ zCl6b>_s)AeqXhWlSaSRjCrCH3kcHvr&+UT+spaKis-G^@&=l^gjv#j6=uoSS~`FOeFHKWC$M?S;f1s^!+5CkJ&UHmgam8R+ohy(h?-7{wv=?|{4 z`p5R4mMfr(I?Us^_`e#|N)!Toux~^m+CKL?>TZ))T5d}s)bL3QP3acA14vkop2{y& z!Hn%W3lq0Idr5ri$vq)hJZL*O;BuIqSzY^57s{UVdP07;_|Tjd541bmywAP@8eeX8 zPEO1q)L}?c)jD-a(dQlUaI5WXoW&?oV}1l-a>0|!|F-+r z$_1F7WU8emeuC8}X{+51h_?>wMmQxh-@ocO+=JX=M4Tn`a~)eJt=}DEzh9GfHnOp( zujtfyb60-*ILiwAn4V0UO(fT*b~u|RS5n;;C46GkXM|vvh7W_y@)3AH>;31QG;Adf3nX73;n=J-F~W~yquC=ePfLu>=|^kTu<~koI@a~ z-a$)6eh|yrs9el_%o4LW@=G*?PTb1Np&*U6NToZRyth3zI_ z!`jSsqkt#Z*mBk;&Y(eX=&Y5NVl>p%MlTM}c%riu36b^;<3N6UMsHuZ=Y2X|QR}g$mFXuTFwH2a*O~L{ zH$$X7yxSS9q02k_2p*OF?JrZla;lOMi?5^e zDcna<(529V^wFBEI1aiP@3do%)ge`cR~$&7KblJP({@n^^4pRObv&YfOp;oPDJN*K z$qZ(6HAfx2&j&w_7Yo;!3^A>emBJiJjAe_GJlijqi)1J8Z z4i@V5IXR@=BVXyl9pEhTDuu~uURPhBfvBmcbl#L*D`CX)vmJka{b|I8WYgB5{qv&V zaqTN#*$-TKsd-S1*bCz&?AV8j?d0lOe7BZc;hUZ?X77;Qb$;L*l z!hqN~X^(Eek!hsKn0K;sW%sI9y7!5lcGqH;S@uP;Jk6J1)DQY_C}e{B4kF%b(@a^y z!4;-XWf-;uAdRvl;TS&GG`7sySyPOET6oU*)0(w&T`oL$b0M zU=XIvQ8p5j*5LNJoYX#_DW8eLSl^CG24%xJ?{;v*3ms;eG8_eun48$tx#;FZI1DO6 z4A@o~v-qR6FW%ut7CZ%8byR)(zi5y-a%H%K1u^{$RZNB$HVM#20cYmhGTxv*Sq zPXaRvceKuPTgCSqNSZnfcml8$JJ&>JpHeAX9iTF)P>Y^u;LyjjV3jS$^g%CT4v)#) zU(1l6xP@H%-&VcL2*hX!q{gg&(QOgeZMMyO9Ji6+556t8`n;xKJ*?0V)f-&vj{V*k z+W@{aRs8j3i?aAtUbV9I{DXr)(eTg|@)Oc;k11uhFSsf5YOS(b>(5Z;|cMV{aW3WHkaqE+USBXVso9Muu^sBO(mzxNgFwi3s`Qj zV&$!Wd0gcKjc8yRiUv1-rNbJjM_o&Q8YmR}>$X0YXfw1{*EW*zO ziPEZ%Y?u13|338kH&x;hXjB6d3A>NXGKnkNis22;?hvS1jXDlSqjJ8fGbDxB3Y#fx zleFkF$@rZiL%C*P%r7SMZ?nyKvkuNLgd~>^tlE6arEFc5;E*Af{zIcgvG7`T}O<4t{0{Zi-nXLp$AnV?WJ20E&gdm zz$77Yn&Wn7J<)PI19!=&gibIVZi#wFbzPHUP7ODoa!|!_tgT{pi6qvrjS9rz($CH5 zUO3~i3vK6^$jzu(?1&4cELU!h4{Hn;>Y^ax8;tcs993%nys3OJTkOUz$C#=>?CJxB zo%81%KDqLgQ(fSEvHD!8dV8CDb|^2^7BL;mMzBifi>#Hp+!zxrcIO6*DF?xrC%QS5 zJ#U!GUlL4GsFH{vE3Q*JOGyI}wP9NYCNTGEDf zzTP?=paPXIBFiM65o=LLw)TDVtIZrW8xCh`XXrjOmqtms{HefKM+sXHVjUe1&=Oy( zdmS%1&>ari;8L~S-!;p9(yl;03)ch^_*f28#=VdP-yWCl!5!50G>CFwE7`$_s`HmS zcqrhA1gw+rOZ3*oN`=o-^WGeoqNw&B#u;fg4yq3YFUw9x_1&+}D|t_Y1JuUklQXAP zze>4s@C!G_V#55ML`h~Hgm23B;N<=qzzuk}sJ)LFz6#`z9aErnD{t%I?`SJbE19m?bdq_DXXe6_u5aDkNQK8&YM+**ur{1E`iA9627&gn57t3 z6DUM$B_69R{N*1U^XSxzLnv_y3LVzpfN*9rv&woK*r1LEp1v;>_2Iw`Jz}TmJ+P-U zA5J>-d(1sbdFP$c;4)e7{M`Amn>|jsFrK>mjP+l|eb-_As2IBVycOB9Zj3qTTV>;g zXt95Tb!Fyb;{NMn#!4b)CzUVtW@BIza(!UOCb;uYR~RqXg0goQY@mlVmE*#g7a?VM zj|f*fc*)iMENW>#q`_B$BOprA#~c!O)EZyAF*8w_T#{$L)$2S}oTP zt}a30rWJhEEb4aGqLPUu`(k9!%2e`G4ud-7{QE6>>kGCLPFv@5x&PY!94uC%ct2L# z%|^pGk1=J+(R?<1<~W9DWR#HGP(Z>w63X-hVcv5J?1_zGY8sh&ke?Ri>fm?R8SC(mh? 
zlr~~xvhih^e=-nLH6@DR-A$O`fT$=t989r|%KQ_=pZR00dOVPc84DyE#m^y%M55Y@L(Y%Ts9u2={QU-jn_I%b#h1Wo zx-#>pY33d@lq7>kQ|()#A8;@F?+X}G7=C;-hE3Wd;#8R#3s#&d#9TfOQ{){TSYc3R zZAdK|VD8t?LR>=sekOHk-lO~r@9J@K_k(#V>BEQhi*U{zh;w`Le_u`L+!}!9d}8g6 zDI}@}9vDL?Ax(Pgzb%mhD-h*^c7K?aX7#)9Ze-NieF{e-CG-vIv8}^KLqnm`Mt)Jf zylFEYD-j09C_0D18rdFRB?^gtUmUF^2+|3#4~!KU1D}#Ycx93G{ZXjmML+hakkRgW zS8$$!Ul7hez+%>wZSZ#W#D$QrQ9wz+vLR7~yFg!nYwH$z3gFccxJsz_^)KX|?a~>F z7iG7_+QRg2^5_{n%*|jP(&9}mgF(MI-X)HqD@@D4y%4Gq=@nJ+{8g6SlmETHmg5s^ zGj_XxmjARrR?QKpwJeZT;dgn8;R=Hq4ANw_>RCwNQ=#Mi9o^rs48i~je2D}hP781Sp4)>v*A2-=! z@Ce9Hs`zUu59a~9x;|lS?gez7`(0U}w!`0wWbbUfs!rr-6DDZt zAo#*?i+%Z-Zd(u(-ulDAsYyp8Dskm@cF(TWc<*x50s2t9H(CkioxB!WEEctavdj zt1VGY0kdPz8RksY{|F7Pfw|xBxH`Fg{f%Z6HVZ;MWE#}1EluYxv{iw6NBvEfFXt3| zPQm$;&ky?n7V~rS+VJJ;{rRz<4Q`Q&ko07{U%+0NSR3#G^k;3`n*r0k5jdZk>SGde5OO77(()nE4K!}XA;AODvE*UZDUfbJR17UCnfvC>nkZ&u7Zf*5@ zII9yHYl@u7v~WC@%tSzRV%l?;{!>j0GEt_!RKiY|{(`~HCiXXB;Djsi9oj4FDOde% z*B*uwCiCaL(Au=p`{Ac)mpKx8g$+tyDk6D(H>`9C#wOrnSEA|yp@7Vi$PFwg~e z+p^AOH=rAJx240pU@YIyavP5Yse{;d>G7LQzyP~% zx_<9e6>p$3EJQFdx+Ya9x+Z_(?`Fei-r7?1A)kXZ!N;-GMJIb5IM6JIn2{kD88KH7 ziaE2G5!}mSColB^)TTSz&BO2~|3k91y;(T8-(z?WJuCW8oidkzOB907*$N$97*{N9 z&+Ic%0KdqaUu1`6@BcO>+#S^XoY+^3y&wCLlNA_+Z9x`DvZ1#pP2rWFI z>O%s^GNiM?`bqBc=hJ3mClk_nwlKIGic$ZNchb7DqZUR5C_2cW-~O!Zs;CUM^yz)Rs~=Hb*pIAQ^z-m7>RA4VW{OIut4d3_W zi$gWpgF7+WS#92q@!g4@tr%%!OLP2|_jyrmo2?f{9n6NhZhhF+GgJypZPJ^|R$q1vO^z| z%suh6FvW&D0}s)V-XYrhP}rn-Oxhb@!7q6z&#+_vyWZ&%v5MnI)ok41Hg1Cb(9$MX z?0tcotH6o%2tLiJ_M#A^%lB`n|F)#+e;sdSw#aKjsZfm%*i@4df6R9Mp`i3amy!j; zw!==iNQH^6+vgE~T7D9M2gpx96)>N`y#VLdQt$l}Q<555yd6II z&i?%>w7NRlG3b?LWlg(Yut%MP_4$z!t-<~3+M8^Dm3MQbhSVb^TyTLcW%4tdnjTyZ z9{nSHkc6L;5CCh0S|T^%5sX1HKneF_MzCr(66!yQviF7qAc7aMTMeLjgj)7e^YDBy zM^TrMG;?9>FF-6Bl!;l_t^PmX<~ZFX(xIcQ7pHFbCOLf$Y$0n_5HdD9N)Fsj{#NM< z9GO$Nd5m2|C476p1PHa4Ne(+PgFAV*7nV9}KTM-VeQQoXYZiC$Ue&`5XP}zJhKo!W z0f+s_nId|z+{RNY&gC#Lkah66x)W~HrGIgewz=CMP0kddYy{C$$r{*2&>n@m~ zOZ9Ed`8xTYan_o@ci)K|1NM{%(t~J+ztsF2+{{b`U}jMbl_yEjPwL+v0BqXr;6gTuO@LTyy#4X!d@1;GL_a3v|c9m zACkJ1D;~aRkLH?RPLBtrWkw~RkpnGQ(`*RB47f6-+s4V&vk_V~B^fD}@1Hxg)j6Dq z8y!T*1dh?;bot^#oUvG1mwl^g3Q3ObO9`)&D4#Wh{eL^hedt4bOKn!$^gs3CT(F~Z+S{_9!(|+MDtvGqME8gH znkEXG6g$FMnDei`LHr%7y(Y>;RQj^tjQ)cD#PpoJN#1LA$B+>Nj*O>d0s;pFN9;WZ zFccz_d-s?t$s;#WI!x7Qt|TMfqNvXn63Ryc=q27tz^&j))n|M*Ob3KlfhKOZe?2yZCLX@8*Apnh*wb5NI_q_P&q`dl}qy z-eRPY)!BbDNqV3$N%Hs>8F8SP6EP~eI{W!8Q;*fMIV_=YlQ6IuNXA<)OjGCY#&RLL zjjp_AjaHEyiK3{YpnL_d@_A_}}5b{5PhJ~kn)lx&A=x_%k?<7-Qp#gQy+vL`+i^A3XR zJT;+9>Fh^*W3o8=2T@e(l#O^_y9ZIwMIB$5abnjf>yN|1?tlhTk9b8*tv}%H)%1&- z@HQjlI4ybhAJo#RBxTBEe6*3~iMo9Lqe&h_)Xb+|KS|<`kF7d*r4Y{Gd=c7iOe`3c1Ri}h#E#x%4At?^aj6xx>647FgF$50 z$Uf@?2JD|$vS3dBnL@6p(k_?QNKp^7`12xo)B4+)$=Tg=s7YBZ^L^ut%@<20pRI$6 zeT@ZVSMVehoEe3I)`_~H9Sw+XaC%1OT?Th>Gno@W?x4kPU{+{uy0PbBH^_Xh5BwEO z7D@_BM7lDV?ppChC%Mpqlr9&=XR=^Lezd1VJ?3&9YukFr{_5M^o`Md)k~--joS*U1 zcw9uGEmyx%gh#%R&88Fn5IvYCgWGPFSmi-g^U70k#NI4Fl^DGmN(A|LW}G(rH{v>M zvOvavy-6O;u+(Ogy=bVga_yc!Uzc4{8gwK|tHb8N=tbP~NU&tE8)xHYsk2Ae!B_NY zH^i5ky}EZip53SiHfFK>hO*2()d&@|GtS~vmwIt62Cuo_$Eb};Zh-+ z;?sKCC&`rKGbRx&xgkA;lx>z^W?7DHKobwAwJHmpbAoVsjKytq_(bgdp&EK?8zoMe zsOInfcZDBbNbmgJ%#aF=pWERjb5tnEHp<|3=iHALL)};&?CiK)^9e+XEpsvvhIiKqslkU|EC-Of9-rW5I5J{ zWKzRa1Q~Qx$)7Fmu<`39r*O_%>5(&|el*hxLSL3ee}Dx7xg7nNw1S)g90rO60UO*+ zpk8ob@n-}}ZrB?Fp#+C=b7OBFn>DsDVFzpZ6*!~1hUMS4k&JjdG#R{r9X&cuLsp)6mW*YQCVV`u@Wqj zqA_t6!KNh2LW`KnG?GT%g1gwhG0(<$>lFtjsFbNOKEpXtCt(;?$!>fO=UgS>gV8|C z$h7ZkS>!*1I~6&9lj3Yg)k3mL;Y0WZrxu11Y=={EyR;)#jgA+wn}i@0&7LNB5S^(0 z$Krf%BLfOIds#pZP1}P^Kc1rZ+8I2Hx_(N@%wAst;{Nbe&6R|9V~WEyvoTWrsZO=O 
zbo8Xe1o9{0;vx=stB-`pX?Rb{nP~*?VVo6&V~y+@Qi!fly8J5E4lrsO?Ds{jV0#L< z6)B$lg~!8MQ7wNFe#5V7lZP|Ge^E?Z%uBwWtIU64q91?5GbU`jzvz9DJke@Io_6CT z*BsQ=vmhP$Ycvu_vln{K9Jjntre57ftr&;#=LQspr_PDF7tM0cqCn1poOVXAKGseYE*h(mhNrNmk zbsWep=`c+T=rUr-bEp8#Y63gqDd$YBj3-20J~WN@^Rq*O9rkiA;7y}jjZ3RFmXx*)qMg5I$rj7vP;9RH28eeLBvH?0LR6v zPRoZjQ`RLLxp*gJUot^b0Oh8oxGZOm{uh+b?ivD!QGB%b_OMrrsw^lu;->|SWmmEp zf;$yk)I&otzwBrZ{e`0IL~H$+$JC`~4q<{f)*uj3u|Qi{CK!~n_Qh~f?l@s?OENop z4b$E&CbK&|A=-x7tR6XxDkZgn%ptgV;!;q6#*4^o%+$qnS4q}BM)#&5=ZTsZj&D9j zP=2Q+JvpsEv^T){{H(;hO;=6ONS9JhhDsWpgqFfGu~;25L^s9Va4caqGnB832}cTE zO5g0UBcHVay=5s93o&%MjHZeOcV`~cnG%nL_DZZUfsEuY3TIIr=@is(Ti{U6jap2P zM#{shgRW_TeZb%#NM85Ap~u;5E$B3&SYc5za+g~q-(3tEtlN-Z0C4Mc?qDFGVWA&% zl>OT+NAZ>4=s}wE?rOgq?FRJ*C574v2-eSNbu2lDNkg-=JR^;fF0PMwt%=MDB%a8N z+x;h2fA6L>{S22JX`%6JVC<LD@pl@~QwAEoKrRuG2CSN!XvEEon{C_zOv zo!vFE>rl#~&&beBleI&Ch3rl1~F>elSDGGz%Q9t{)l%FyQCU5WVpi~!C z!wut-?|M(4H=cXm-o%ejPQ<=bTz%oR26_{EWP0kH7qi&&pz+O(<#AJC-Xuhv$IFEK z?*1&Jc8|+MXi#Rr>*V4rf;(-M#Ksu9630}LRz{=P>h!n7Uq?Y30waPyEulnc4m~s| z-cvG)9ux(3^Bv_SCPGtD@gE-_N9gtj`uiRkmncb54uEcM@8Y>8EVRtbs*=Kf4hw62 z!ahJ`<~&UFNT<}Zk+K&vrTNO30{$_jh}M18kB0hk*M_tjc#nY}7W#9W!5T#>xPhFx z)r!JAf%kOdSaL3x71tagmQm6Ue^;v4%gYOw-?AE_IjoF-c=qo28Y4|{Yj_6%4}&dy zU42pO7h8u(puDhK@`h*It6w%2j7q7*+k=Umwotv{73d@wxzTlAX5rn28htnD4>Rka1&@k-3O`_A;<{HhO`~#aJf(*i`8F?yo^sLGHx-ASn%9a)Q7>cU{z-vp}`wS{o0uXY`bO5fdMhP}7LZ6uol1v0)M zz{iIvC>217b2U3<#RhuETN@}BI!WrsNg4{-$z~(h7Z5VV<%Ge(No36j28x@FTMQ3u z;30E>86X(7i-a%GH{WBjsY$de!WV5qdl_>vHwp;sg~bqU>^T`lE0-h;;CT0Qh~D@( zS4YSG!W^J@$%hW6dDjyKG*dEi^1x{@>Lw>9uC@5_E2a(Fh}QC#5F!7yKpKNQGJo>L z9bZj#B97)GDq}=A_VesO*7~#z$I|@<=bEwAxlVrI1`;srt+`PhiA-co!LY*ZF6-GcI zxMH}R_3jQ!&Rm`en)FLA3C16dS)hC{DM%T*9Vw!%{TcK2Hq*hsY{~l~5d?+FveM_> z{kv)9@snQmCv0DaOb5B(Jx3ug$H%?=RUhLHyK&}cVARjuU%cnmLMIO*Ffo@yMTeD>a#PFmXc}~9H=qd@7Ogu?JH1DYBIz_c%!YH zFy4f^bVrumzjJMjPle^iPa3a&-pGaXJat>8v%-?-#dkUxwsbS zkTFjdZhv+CPXJZ}sr zqQN|ON@@fiL^{#D|1I#GH-D*cha=5LT4rYT5(oO2yHi!EoI^?^6IF#`tVHCBVAo6B zXJKN#Kk%A_1r9b~;dLw&EWR?OL>;hzkX-#L=iIG6E?|aJr1t>3OL{D*6xNTKrIJa2 zeHt6;Pcc}6;h*mA*cY>3B0Yk1P9mj#sLBf(fv(uN+QGqw4S)=!>{n?TtB!e3xbGLYr^XC5q_C`77T(YJQW2STTd$YTQmLw9@ zpD#3Y4E6^A2W#KLNWHgPo|R@U~#b+dzXkxZ>IGLGc!XdHfTqz^`j$KyHr)$ zNk?LgAYR+O=7xQot5VRy?7k|O|Dy%g`4DJNCw;Oynd-wRN@@Vwz-}{mg+kT1jWF0h zNaKt;5B@4My*WxmM^{IgL8R&+h4d@nPl(eT?v&b^OutY)SY)2d5q)5TCmh?Dzy8La zKOxSHZx&tTZho`~c4%0j-F&*wKV8w}ps*jRsjiFp~=)yYWrmum0~2323B z*@#BXbY^2?{V`?LYweont^@UlDGC7H-uquW7ky4pM7j;Lwj2j8_*f>GLYUb~AStYJXyB4sA z99j>&v9Ym!ep&NqY;0HtngRUQIo;z!ak7Ln-fOTBvyP(oUBZ5sh@!K=j`hX(2A4=I zkoZhhDm@4|L>jokjPYZKp;4gk{{xPq14WAf)ldKc03~!qSaf7zbY(hYa%Ew3WdJfT zF*q$TFfA}RR5CC+F)}(aH7hVOIxsNB!0{vi001R)MObuXVRU6WZEs|0W_bWIFflkS nF)%GKI8-t)Ix#XjF*PeNGCD9Yw4F(q00000NkvXXu0mjfmS>$4 literal 0 HcmV?d00001 diff --git a/pr-preview/pr-254/assets/MaX-099f261c.png b/pr-preview/pr-254/assets/MaX-099f261c.png new file mode 100644 index 0000000000000000000000000000000000000000..44fdbfb3f7f6b911ac7f0d938ac81ee29200a6bc GIT binary patch literal 26127 zcmc#*^;?tg+a_i7Kxq(0gM@VVfYFW8(n@!Y&e1K>N=UbKBi-HQDCus2?>_H8@g6(2 zJ$s)0vgf|9`@GJ$7pbl)kBdc)g@lBJs{jFOA|WAnAddY2uMn?m;jiQoZ)nz1DpE*D zHSyU0OwkdaY0M#-Do98^j7Ug14Q&vv(^b;6_IBk#&(GPxKWsaQb2ONpF>gqeht7TcsLSVz^FfLj-VEAe)!yo&aB$mnkS1Q zQJz6lJ4*xHT11Qs* zLu$JR{pYu5My)%^L%K|#)_tDk&CAZ0hJEu4vIT}ulm8rQ(r1W<6J*}9s33p+=H6tr zJdS=4^p>ZyJF9v0eSjmf12}tog1$uGgoIxX@~NEa5(LK4N7Z%@cm^dl5kl$c(>H1U<+CvJNXYU(W(gq zt>*WV$Q;?v-k+KCEZhe1kPcM=2m$*~EYg($(Fq^EHLBL{cElsw0&8RDnUln;P)(3> z)A{0^sq;wB$Ix=niPxSh=7HfCQ#d+b!7sX4Y)azzD2ij=gR5h0#~2pt_m|kHZlu9@ zBZlSb4Y&h9?@?F-j9kfCs-L-vtCtxIVay7EuVI{iFDv_G9A(LM4bH 
zbk%|MFAV_t*NHRt4_vAnf24KH=jSL5InvtH1+y{OeX$+An?O;qU|1e}POoyn9Xn;) z1ch2ifVF@0&8AKH$luILbI#>=1%y++?T`jMiPEOabKcs4$^W@-GFeqTJBE6hugWva z`d+wzHPv*8ZnlMIw-u!Anu`Jo3#49XTFOY)W~}+EA5)O(zjcW>zl7hSE)#xlb)3^7cD_|ykaF=j#+90-5?8WPEWL;rd) zdiRG-sk#(I;YN#C7%;i!7n;5HxBJbo`_3n>b+qC)Xvo3<4R^keS_c}*hoJ*eF)I1$ zauh%hkJk+wNvMHWBqhX3Dx)%X4hz7Nc*M%{Zo^|P3c_XI^{&em0Lqeu@d#Idas=R~x6m2&iXwPiLB4wBj(Y2j=elh1UER+I|_ zgqgT)aX=lRIrAUSgjS;yC}n{HL7plmemG*^u0S?s)E^@f{3K%@@l+!_hp-95KW_{; zd<(1dW*^_>D?VV=Fp+)F%Ca^U>v_eyzFsv+hM(G`hDnq1y1m{F`Q)p+U|E{fG<$ia zD`n5&@g0WhCY4CoTT(DX@|IW#LD0C@$7CBPU5;+Q8;v7|%ccOVddhI=dRf^ohd(K+9Tx_x!t%lb9V&I55~SMxxE}gu>u)*#xmK@=^p-3u zDL@CSLgrU)e+D1XZKS<+`!k`rPDlVg?xdE+>7W zQ<~C(gC=x)PocI@9aZ$p!+oLNZ%r5;hWyw^QL|M~<`>+Uamd#&MSAu6x#CiKGh~rYjt@e>#&NN_?gnQ5!sfQ9LiCHcf{aWLD{ov$gbCNlOGO%AC4 zoSzyDMlrWml4&SQ*V$0`pu*$3?xADsXhve{?g1rFzfSWB zF%d&tlg2SxTBpcA-S536WhNG*rN!IGZJe$z!x%E|wbT%Y`=ejN`Rize$)_sgsK;K9 z@{;6{^##4oq-`a&Kh#2D0SBVXmtcMJLmBPQM?P~pf)pPT|DDt=UhZknUmdu&wVQ1D zU7<@|5^|J)FM-s)q1EDt!s+0Shc zX}6H_hNG6J&p5qZhCPnr^{o2`N2k$io3XW9r4r6n;?*pacljM?tjY^SN7bQtMW`k| z+_$1t*wY>fdB`50AR|lr^Vg>D(IsuN1scW15_qrX)Z8n}7(S5DPA`7x^)4jj1%Yow zrCHr!^cgRcKWSANA`-mX{s2{HKk(7+_~H4IEaJ3By zX;z6+R`9_rE*6>b3(bS;npFsTvG3qBdbKUd-`$&D4;up<=%Ce9F5=W`OeKBe-XPo za7WI)m$!rRyi<4WXU1PZpP*~<$sRpXtFclU$PoLz7!@rur@iH%r*yV0gI2@|&lTrf z?=deBU)bj@tt7YJvRN&n?$&E9r`0Ju+5f##hktsB8hYGVU0NEkHR+-z!+2Q#C2=hw zzb!|h7kZ8x`k69(`63B`$sx;s?U_j0BTcvh++M_x0rUBwCmuA=YJRHp+~W?-@)>o{ zUZtXa%#VloVFyt`l#O|4y@^Nu<_!@pAOFikf8SRw51!Qs7#=MlON??{KbDE+x*^SW zG^Y)!^C7#*U_eW&8XWS@YQGq!#hJ>9;4>$`x#%N=>KZ+}QRkz4W+zl5O}am#C=bIG z%v>;Ix07U0Y$H+pVN>Qzy(aB+EZkjh`!_TDjY~QCJy*ovuHSlXG23Mm2wc?yx>4PkeM(MhF4S9aNf|Cj1GrUY!?NM46yYcUg~VC z0~vNIWoA36ktCQHJ07v}FJiSDV%t50PWt)s$XT!TRnVA|3mL9zNGAG>I#F~C#MbkD zsy`jMntI}|Kg$#Z+=5#2gF`wr7t6jT+>LKFQ7q&g9889U{Em$%qh#cjDdPD!^Yf4B zuk`H^HS@8G(P$pQ74WVn^`BhPkhi3y`0x=G8DdcFJuYqONC4_T&aM|b|7V6gm)S1I zpnwR7kHGtTwvnaEi^|C0C5!~puGLQ-B+foxBw+`d4%D}>83p{CC_=k zjHhDrW5r?@H~b?K2mzoRImo$QDBxiT)ZW@fv5phwbQIkq`wF3<%5I9cmr+S)vzPll#Q+G+w^pUQznG!V@oBQa**o4ZYkcmmH;!9 z^*?0C^Z#K^v*L)#CKT;0yFl$-ziGLr4^%Y!6Lq~59Ex!VQQq(cKPZ!l_7v($7IlNnb&W6FQ=Mi#Y68B z2<4GA+|lt9mBZ;Ge}Qf~U12fJQ%IrqS;!0$MaE zuw<)|poEtz)vgI+P@s+4d3|5%BHRIthlidht6tO`nuWcJ>BVp|6;~-*8zMSgg`CVdOY~}cO3?2 zfK_A0y+8R4i3~B|K}%u9B57}$2ArG-66tLd5(0@JgWc{0jGLdW6of(qTV`n+Mk=f* z@6Tb4z}hO*ju41*;cgbVhl@Dl*;kRIJC+NXtK~sd$yB9ez_|U5?U1P3rV<<;iK6vR zl_6;n*VT!?syjo>yiNXkQ^r=Em^Gcg4FO&Qk7pg?ieCBodAm2OOY|FF|3Uk`Y+;x~ zGzb9&@wN7ZBAyApe-z4xl7|~FQG>tQdYQbXud}AU!i$P>-3DrQj8hKYdSMcFHlS#5 z5=Wa-NEdid&(kE`-O~_GKGPHWI5o2QqiTHdNT|p#4(Ywl&Uc%9Xg>%SCSXdd8WZyb zXS173gDiq^oCTcc25AQZIyuvu;t7XUx%9flKz z=k3Or-4gTm)SrU~A*C=6jm7~E+8W%}MjdTq?0|cpi zd|>ip4xX1{F9(0Cl+Py|OFeCUCu_(@a#>%D1-8atf{Vs{KhR8Hc#dWGqZ@7@xLKi1 zne^=7N`m4^^5|-wd#p3H>D8K(JvMZVI}5i=wQ>6j+?x!k~ z$kH;ZJubh;?}Q3&%S@Nudmp3s{@8tt@wPKuZVg{RA)AxPUepN=OHX#|sR<=x@rDChlb|X5U7akQ( z#oL(Rzi0kWW?<6DsJPT7SrXc-yZb7y(jeNd_e;3LO=7;>tHhjV%$iI;%jMl38A`!r zAj{u4josAqFS39w77ML-?jNgQ2u;-Gh`KTEN<8pfHb~tqPbF8Y;sPXP` zJf?x;Hy|#Q-u#A_)*ft}F^7}tLp!YmD)1yqoX!B|VP0zcx5L`s#R7GutYfnHk!k-t z5uO_!4Sv}I9n8i9uQw{VT@^q#Z5tr8U-%jXU!`G-WR4*4>%zM!z!H@V9Ecz3f^8z?+zgnrNg2GGEWfx$U% zd$zAlvQ;GjgY0111Pc|QCwm(Y;|%dq&f$6CSojbK+;bdy<(p~OR?gr?xiLy=(C~OG==p?;zj-kU6Os1fIvRmS2yr`%WP$BcS7qOGXz%v zHLXV7UV{cdYrCtMa&z0LXOn|fDu1ulpx>U`OV@g!C&52OA?85BQXRh7wSJa6^A$iaQarS-jN%qN@QReJ-aTyp>X6)tGyg*4H)8YK6Ie(LrT2x8D#94 z_x9@#^~^Opf>pI(*-nXO5h(%ia%t^4(U&U+p$@&KDEk!%a%ZrBU$t59VxQY8N?CQzKB` zYurgli6Qnj)2^7o!FJgKaTltIN1r?fPFTXm?+_T%(;@Dbf?{W=JxRE0#!!kmZUU-< za;~5Ago>pWy8R-VV7lK*tNFm0s%?c1Gkg^Pi-b_Y=#Bl!ZFjGtj)MMFaI`tbC3^3f 
zyUF9TCQ_R0l_PwiRla`y*LC)$oF<`l9t<`Q+DlIVxKbWcP^hFg%{jl(ahWF{oV0H| z5-2M4Cuw8p*v?Ltr76@RcyWOQim7T;UcNs&)TCxNfS4t= z-G*+P3PNl$>cYv$13kvS0(*JlH^tj(xFf#CM0CCY)(0LjIgF8CPC|CfdtqG9qz3Rh zHiVW|{ei8=xPvrR*@7Xart7!`N<3d1wwO8Z3pKy(0rnv6EDGADCH`AiEnW8_L-1Ob zo36 zkQEtd+$v=?k%kaifel9T49WeKBosGhy-g-u;=Ka4Qf9+9z?0}$0 z?^+(7=)ilPze}y^xB=zK(egpZLpD8M z4=rx|ElaBwK4=RMJA9KtC^wiH`hbioNq3anp=0`-fIPTimehz2q6*K&@3DMdF|g|) zT)h)$4EFgS!Nof3&2nQorz_s>FeUC34F>VZHLc_Wr-q6F{r8@RKgDwX~p-%Wh~xvGFNf5p-i??~r-fU>@~@ttRVaoLOxT+)>y>vhbypWFFvsN@}uxQ_iaCaIvD<4L2G0sh_IVZBgi&SfdO(!gY0iG2f2l#F-+)x8!zy zlUiDuBzX7%PAwi3@Po_^R?6Fa=Jca}@iqzbC|szCLN~RtmBeQ0?@am%^7NL83Y;Uc zYJK%rc&WrQ@=2XYZ*xA74V$mfxJNWaM@ndRimFqwDPerm&yN(k=5j<7T}?cZwvqPP zS|tYzk34R?bQGr0RUka%o(y66r$sV&bWiwlN2Z5AP$_zUEk?uCxlMQ(scE&dz9_F5 zAtYIuDD0y!10mewfxJo*>hh)kuoGKcWrmqIMoL+bA(g#<%Bop4A+6~>CR6%WTT8DH zjY1*YBPnY&*Rp?;R)}klt}u*H$d%2lia9lq|5C!fUzCL9EfjUAacN|djVkG*ieyXD@P$y}Ppiykej3Yo$~sk4i1DNnBxlAgy+(e*F^Wv< ziqeQD*MWX**^-{cv&7<0v!8C}UiD=84iSB9CPxj222rAr6~|swg?Zvl1KEq=wviLd zQ-|!FXda&kKF;=rV{fFIwdhp^M5z37^a;V%j<@ z<=@85q|kFwZI$yyv14z9Ha2P0j!?;O^5{Qq(NS*jgcl2ZNcKHuDD}VMwpmVrU#eLH zG7ZirauBXb_}Y8de+5((-Ok;&fEAHJ-y6R9;8%!m657mPP201F)mc*AMD=~b#mq_%!j{7_>Z%R9!rf+LJXa!$K^q51IF(v6O-wAMTFmn!1P!a<*t|_qU=z zrDEQ%6{6PrC-sxMK0|y%z68HpnDh(z$Wx>s%v{uEkI<3!QL^ga)$ENker{%r%$IMG z4vt)#35w?n;2#gyQ8&=4jXa^C5p;gLCJ{LI9^3Yra@KDc>f>(VU1TlWnerMqBvuPSzt z<(+^CKbwPn1XcHwNtbc5)KGc(O9dHpTqRD0q7VhAfD509^~u%GBFrrqhufP_alqG>3M zTYpHkT-HlniSwg=*~CN)({Bks4u-~+6D>KyY>(tNy_tPw`MKwvH(`uGS*e6h?(^M@4{Zr;6eM0i1gT>6d zkN0BMe!O`ZD(M^bw9{ie5cF$n0>XA9%^`?VBKfVU>loQ~^`!j%?;g$2Z+3Nz*MnAm z$z=H*tLZ1wI?Rl(DX2!(kGbcDM8#(_|Qoedc;R(e86lAv~?B~V8+VEWo zYS)4wp&C#9Tlc7=iOOX__FhHTCmgWw8m=2jvfUWv>p>{*;8{pM8@TTaD54GV3vI!y zeNk`ZOUHwxv*QHMS(LP;yVDtaZd8*v#n2g4cjk}Cn(*zrB%HB2pT53{pc+z{pIxgv zICR!!>(;9)w`+(y(=Q_hv z*&3QkTGs-zj^+5?YZ`~KhOT-{krv3yGaU;{y>FuE1c8?|!hq+d_E+6a_9J}UYjT-5 zk6g0l&y^-cGnVDAH-wKwSc;3gpuCNXNRk7%1^!^#gMHK-{P|3vDdO}CNP{+q(iWXU zO3twyBnEL_8W<{l@3>P!nLHQ-2lX=EV1xUh11&rC2G+||Qh0Ry(m7)(OdSJ2zEAkH zxKH6dy4SGTXAJOi00C&&5S|$V(N+7_`E&{^Sjb&Slcrx<0A3%_g7Z2;?r;cqPn8mG zHyf>DbwQ`vJd-K<)WOrr-S|;cCo|~L0 zISVxwk>|v=38z!jc{5;1k8$kcqqr=6#LBohO#8wbW_12K6worfy1=!-iTq5S2oa#U znWzeiSHc;SK5}%29RGg34f*Qb7dkT-{;~j5{1qpH>CJwI3hsdpTnVUu=ZvNND*k?< z3FfAalMH+`N0lz_l%YS*wqm1-ao}2q#0IR?>=P=k3?F7WhU^RuVRc%V9sTDQF-U()J_i{A>%<1cOB(%H;p z!wQIwgJu%Oo(MwQ{3DAY-j@}PQTD$ud+)8eE|lZwLcOQ28qqYgmKpG9-3NKvER4$>|pu8O`HWSp`^k+AJ6QRrf_qHxsApIVIJK+_sc zbwxiqMcpL2)zbccQLF-!!;n_c2TSwmQ*8CJJ1+t0wbeADW&gTu%bDo=BwNlb*gol| z^`&Q5ce-;?<>A9AA+O1+5h*{It)cx)Y4s1zjydVgbZqZKz;$fGhE%Aa2WXr+ zmloVa?5sKk5b~Gyo^xV1bIdkvh>97KAWqfLsBB1-h;qLMmoB)#sf)ZPD={7OCU%E5} zjXhU>=!t?_FyU*U^h5O6hCK8)WsRr(m8d{DS_@NHz=ddcs;!{G!jP2?^UsO;ngzB2 zy+%UYi=t3J)MA>ZMr98D{C1rAm=eV5Z9b8!)so~11g`(35nKRIu3`nXNSGm5Z}!_a z_ye)F#ScUHvh6~y`a}HdL9Z?RAI0}QB^cXWbPQ3BA-4Qe>G3N;$1RRX2ze60qI)w$ zv}TuxEUb_xQCWg{NQT;8$etJEL6gh_REOW%r@}eT$S}bs+clcFtI2>I^DQjk(2hNL z?*0{=s~PD0=Kf9MH>^r6WQEUSdR0?{KWD?wP|NN;82HQb(@XV^5i$ zeEbbG4UKtH@K^ls5IZ>P>G1U&^^nhcBE6-$5V`V9I zC$C;@(Is}%@b#a$LxfjD+`y+t?D5;~C{?M3cuYRGoo2}->GyfZxCvwIjaYfOo*>0A z6T>_z%Re5KI}#41K3J)K&84zMC`kAUw6Ey&aEK_JWAt(|%Qk^b1IUYyz`jWvU9%v& z!G-$4x$+dZ@@HHJlw->?P9uce3_uho!o1Q=Ny3rzyoLW4;J730uIEQ|;|zB2Zjjhu zQg*c6nE_Xg8^e*RI9$2|3Dfe7Owp-%oW^!+!@enm)TOR`mHzOl=-p~cee=4k+0r~6 zn&w|*d8P*5JdQT@>o-jcH1Kpm%sbA(@bI<}FDX3j_Z=k`_Ffw_4OPs(c^IHyr!+a= zE`caYd)EZC4Yi)Y3+O8a-fHqhy9l~AlLioWj?H(B` z&sSWS{w1x<+>t#%VM1EeDxUkIm&7}akwE8!{Cnx32vCUO3Uo8V!R(7fDwfE|`kYRV 
z=pVjr&Y~l3WEf8%^Ev$2?nM7mTgLZ#qKC&nZzJ*9j{2~j8pPxW;+41e{B8jR7sVLzg+JlEp6K-+_mlx{xoJbS4o-5$GvNW^%8yk+lUTot{eR6N8}Yf zy&l{AF|z6E7IgEALg-d05m(y8w6Zlafzl}=ZHHmA3Wz^h|MPSXQXIm42qgoh`&o^) zdrwqj3Hfd(!`r_{u{n3f0-ZU82{7+*wUN1eON%IZUXmf- zCL-Q=Pspd=u>nAWGogrzKGDL*A7lH50XAECSn@_rK-xlVTnc|Eb(>{Kf2c zc>6Ps2=oj;o3_nA`rJL`U28RAQJUfp#<@*9eWhzPf7oL-_kl{qIUXg`$tji z%D$@*erEiBYqeyl4)-hJn~&K?2w$6ahPXu9+VP(d?%G-{vE!Ue&$|WCvBK(4zudQI zQRON3UW2>dQEcaLphNjxw*~i0!_EA(Jh8<$95kgJX7YW8(kkCBxR{Nel}Bvv2ZFKr?i3Gb3_iI%!k)m z9#^ju>6k>+`JqNyQ#0Aw4~Tf-_?i%wO9;Y(7cqbXv5hQhzHg-gUo5s%-=_fWwhNoX zAubUt0s2Sp%}K{ci2EE0f%``i31h3801e;eYwlZTpqXA+f?{U+DK_bTsXAj)n_*6( z(U}ZbzZ_j#H7iS+f0GeBY;5QMVc5KG!*IjmL4k&KvJlqdEJ7>B!Do9$0mh1+4I)U7 z?>*6pfb`Uj`grXA6PNz9>;GB^&5mn`|x1`6T0r5-q4!PG)D ziH94eveyV1opw>?3&^$oOre>js#tmz`if2mC{imb=hYMrY}uz$Dk#>!70YFD3*Wu_ z%#M7>G)`b>AEGR$`lP4=5uemeBGnOE`6l?%X6&= zAOk=tZ?1zp-&tmj*`6qhd{4q8oDh>+oI*(Jv@MO)RA~5`n0bco=GrwsDC?lGf~mMg zoufAX#HcSwOQGW$;#B%wXN8HYHJ%`_OESC{LywR&SoGQeH!lZDnb;#P4>^k`V5x|O zd(<+A3={TN?j}@F0u|b)zPLo)zfgwW4OX!mlnJ-AlGt-Z`26Bs$R?cVA6-#z zC4jk*k(2>^Featp=wPgLo!b%K>WVS{i;xK8S3knf+}~(Z^SK=eO{>b&U{v(}3_wxq((7#1k8d~D_>LJGndZCaNZ{47Rm$u2)wMU@y+ zq&59ro8S%Yeg)Uo1dT##zQsoK1U;{7X3XNouw#>_K+a!(vXdt&(tPP(4Y3__v7Vb~ zC&~DxoOq^=4=Zd#E8SurQ@Nrtu&NDYb_m zx%umcN^^%*33uz`N&I5jimwfulphva6%Ym6Gg9&WMCNo%gz={(J6l+tJ3Atz(PKR6 zbStEjO5f?B%Sm@1atbu=j0^ZnAKKk}dqB0ToT%lZeejbIY%{GHKpic%ontt5L7yH5 zng1#T_|qOzyk|uVz93DaxDtdbq`$~UPJ8Om-7v=5J%kw(Q-cn%0g0k9k+&=VdEr?~ zG!ze1db%#Ac0q|Y=giU`uE6Q$P>8g~XDOAFwo~|fZtR*<6*=v!MApWHUPA99n_iH0 z&6f?Sog{l9a70z2=2jf;c?p7~fhT`^B|Ia)m&g{-$g7l>GwA2fe1Qlzt?+AHCsQZ=`*Zm* z`B13=;h9*SalQnNLZO(P8++@S1SyN=shS^gT@^&@P(Am|8?ndMu@NXwZ{kb%GRV?1 zPZ4^Ytr;~;^|Jxg4<3-|q6V=x-wrNU;(@?dowU6TR0>b2)Hvd(iFf)zowC{qsBgq+ z&|qjcfF#mK0&vd}n0_J*HvEek^v5mCB2`Yi6HnR&Iq%t4U@KFh;bxtxxa-h`FK@Rk zY?0t4?gyUg{UA-jvjm8B%N{l}BH2t4OVTd~ccpY=ltHBRp3|cR4dyf2($VLqYO5L5 zl6bA(rRCvM%HZRCm6(#38VHJ*d;i*c<{+=37vMt^7Vv=5-KX?7=dHF*s^E>!Kr+*>R{0>ZISo>Mqq_uV2QtP(YtwkpBULeyL0^BlAw;08lMutJKVY)5Ey zp`fTct>6_RU`gNc$aZs3NausSBit14z<8cLP)))Q*!TWjvdz=*j$6)t_*x*#X8G17 z*EHS_u?wcsjR*biE$x_Zh~4PIhVNm8;k6l)W?=_9Xs`k{qZqmaC~>1b7{QB5Fvb%F z6D`FgOv+fe+E@YeyigLWtv>PJ8J4hFzvb8C+g9cow*td237GP?tX4O;7MAb*5_O=Q zG612{w(kit-Gn?}9h-POsz@C`Tz**RlCcGGtz+*p zCPbxK1}Mu+i4m3xkVnO9(9CL5qMS$s7qWIhn z2MnF4WGdV)@OQPuTAU8gP0gQGYeIPO`5W4>e|*w%_YlyR#(5Chy%-qx-r(|<2=X;8 zSz8rALvizEW)H7d&hr(hQl~0k(zdFemOgR|Y~A9n-5+zwl`Wj8 z25Kle&kZl*@6)}AQSiXE8ra7p)2axvnjwqpkz$S>Q?Y{?{AB-GOAOcm4&%gR&&XJ% z$G5``|0!C=4eFEvtnBAuMVxu9kD>I8C@1QU9dVCGc-liTZ2F4WPc8!cx3`4G z&EXA4k1J3f5{;e61DjPU1BJE=oKiv}>*Ag7?ltIzeB(doK4Mc0^Sgd!r@wzX-ORP6 zwL4A9h>P>SW$f2%fiQk$9ZpWCH9&?6ayHYJy()vN9;KQVBaKEn!qRKB40EqT}*>qPvA&2wF?0<-@2>_L_9T zFB94y~*R|iJk|RJh!eE*}AMj^15p+gy$LUk;Mw^U@_5+ zn34!MW(7NV2Rc~pyuHXvl4y3QoZGh^^fz!eBvi-OeBKt&uMV#Zy%TOlEb)wj3o$w- zA4DU5+DBXOF7MnGjT2;bhDIsAB?>;3(Lu3(!ul^H%gBmXwcaJ+9e1|uju~26LiQrl zzO<>MqB9S@EG$Bp+sw=0qG_}}&P0SGUEqJu@rTHJLR)7J;O!XKdGNuSJ0T$>3W4ED zJ(u8nEG*&F{teG>qtM)tiekkU`rQeZvT0qd1LwEP5O`z+BD6!8)yy0KQHFKhFq8Zt z+D9Lyp{X#z}QQF%9NUW9yYl;TfKy?7I2@_KE z-^8T&ou0pwnG@y7`HjY$5-$}jxWf)=-U{)ysqsG&Br;;CDaB-+ zIxs8vN`WF@*bylx@~jG5M82C-j2O4OZ{#jQH4o!~NzPccV#=7Dkbk@(R{LqFOs-we z%+C!nx!SX7@KU5EZmyza6s+RI2$?=ijUVX>F$GT&f_RA6nc>k=w`WP@_;;i4Z0jK zYzNEbj+r;U>)%uEs8=IsdW|v^@QQ#bWedQ*yP`R z5%*d+^|cB4Qtugpc4z-^YZda})vE1Y zd!ydbSc}oYqqu+J&`ghU)s`N8rlQ?{Bu?OPJ6-LmMRfO$pi}vxeMhV%U<5Nir$m(% zwKVq^*W5oo9R^n6EKdH?+M0F>(YPQ?lNT3(lH8}Dvy6lsMeR)5 zj$3j88@zF0+IHuJhhM}KCKb-}98Bju$p}sKN9nK8cIlWZk9$Ck+ zO;_%Gb=RU{2{hN%1jPQA$tQ|MSFN+ti z6vOO8L8YvbbT!`@ 
z!z7K4q5?5bEX1Gm@zC2*ztcJ@<9?M{^a&X~74Ow+K!UDQLRtzJ#@!JKIfnBdEksfS z5pL0}Y#jMtZD$n~)%%Bgq(Ndpkd7IK1}Omn5g3L>X^{SbbVzqfBQ-M;(nt>}gMy@# zbazNdw;&~bw*R|xaW2ml3)Y%7tUa^$`+cA1^E_`lJpkPZX$ifMd@(@T6F)&i^vWDA zbL-kyHaDG>4v6q?M$#2#-I>DLtX@+Xr|Ov}wRlnt?-rHr3VU5N`U443j^^pNVglVc zr@%MYlyrFdO!s!X=^t!V=zk)X+>s#`2~cdk^iUHxzXb8xq>*q2^pW+9*ZLt5?u%z) z=NgNfy_Ie7Gslert3PcT-m;(tNf;u(}!p+MvD@uG}U|m=Vx*|P?jX{%7u=Mrx zzG=MWv}=e8S!KM^PtwJr)1dbI2QtyG{Tb4$3n!Bg+tdfVukB%GZYpDVcJc0>t3F@1 zS zSP+403c5jf4$rZAC9^-wS4Umr znqmO95LLc=S1wtD1SbBAYGER|>Idhg#Lma@0PwFr)nS?mr%jgQ~knd;z0b7kRh?wrhj~N>pQgVI{MuALVcqF!gQ0R${-<{9Q4kY^t}C zGo(5DXCNNeM$EVnUOqdYr8F;Gs;Gn<_llJTUrEN&M9Z?O6RK^u%xTw>=yQZ?VVjOX!m({n z==W3N+O<*d`zIGYfJB3t_R^5F-P?AVK*~kOtgddRgBWp!cw4}(Ji@gn^~hRv?k@_EQZ-w+4VR9M3ke^(H&`vYiruwB{U z&QOMQc2{DhrH6?ijc`usBEU!jxTr~iTgqDgLemi$^Y;mR_WSvWxZ1Z8!x_}2DrcBq zvtrZxfK8%L^e!33)xLCBQ@awml00%FWI0NIFi?Exw5&eG(xN)W5>zjoxl~U$f09>~ z0p!Wn4rh6mDmR){ZbO094zSFSm)mNdc@ICBG3#P7j%;YsW%k>ptjNr$bvHC8I6ee! z(Ea4Bd*@8{A3*9fEYR7VtC#A_8|8?8Dg&lDl<{q@Oa zOu}c{o6ye(9PM+PHA#yTMqv|ZVadu<3#`X>xt=l%-1d#Ru&tE-3Yv*1suArn^X zpN=q>q?Q=~ZX&k$YU7MImRD7#oXJ+@I?;l|~xW9o;%Lm~i- z?2jtKTDiofd4dE|Pk^+6*-T}({@g#e#OIfLBn`y7EruwD-bshNvM1Lbfc?|&>vC2u zIoU6q)EIau5(zIibj7*Slr;>FE^Zj3%21Bc#x+tdgVbjq41WCbZgRhd2pH8OqI#vBNx#=c8!hRLhkl~}%aa>3z|1?b<&v~HAFjlnm@8wScTJu#M;$&s` zQq2TF^S=>r;6g890JCSY6*-ubRI@qsV-8anxNmmP3NRzMX;pb%f@1a(Y-hydR#bK`)<;H{NgFP)NmU3l0=jzw6KVNB%60?P<8E>5MqLFe;sB^TYO&Ax;#(}mp-kn%z z;p2x$#snR%tUtV}XCD-W<8XnYCCJ^~U;p!`FsELfUK``SeFvvW>lg*F;q{}Br;Z$B z?%WU@l?~CpK{pWLL#N5qoqA@K#OHH2RV*q+Q#geHkCV|H=<{Ld_C`1`d7NA7wV`XJbLMJm`B~vZ5a|eE~35=3=MddY1q_O5%P9*+m5e_uHI#n zS@&oO%VU#G^y_G90Sz*P*xA+x4wW|R4A-C4&b7^|1?wnbaSl@JYhQE zD}uVaW%jg9LY^P4He}%9xBuWR3^}gCEgwnOI%m=n)dPTZB#?pfG!S2!QED$k{V~2c z+(9E?r2=ey?@z?%N{>p1vb#!=FHb$7fJM*kby7mYUzQONWLLEZqZItD#KWL#B*_&E zh23oxSGOofo>U1Rl)YoY$s9VnF6C>P_AgBRX@=3mg^X(!FDEG5XKh%i2%t>xk*9{& z2>5Wr6emO-<}fg%IPM!{H66(C`5{^sQO=N527^jP}aXWjI#qpw4L(^{z?&- zeDW9!WKkd%XMnISE86{Ts76168y4xnk+*3pC&jk=!)hYV1 zxudi`|1^lYR5b&lVNr^@<&F%I@=_k{DP2HdmR)Tqso5Ip&X8`g!oo1CKQaN<%evUl zOZyVRKb;lnnIZth7`Xj1BwCyl zt}Qw**wtN!)hPvJ_)}=g2A1V@XSVwGfz0<2(eYtr1m8nBZe0xV;zV@)Z_qGJ=I5;; zA^THRH5Z3JP|9V$_#-2R_DCDg5Y^46!aIW|P;M6{NCP}GJ<=(702$H7PqOGLA3^)U zO7Bg1r~!paNlJ&|t#hH-yt|c7wNn3IPdKG)tj--@go(N6#Y>3j??%-xdYg^Wp)xR4 zg0qj8+kop^bXX7i`vj$Sf;C6_yo~kk*%2R1n{3Vm$y)fVXV9yP?mAj_qN=<5=gXG< zFXlFz7#0al_=q*7yqc)qMujmk+8+T?3r@68V^tegLlSR`BeY@lu_F}=&~@44_jCi$ z30oF-E|rYsAwiFfNW}aP;620rM!Y$+PXOlDkzv$$^HLZ5mBfdYAi{EFvimJ$V0CfYZ%lsS{$Jqt@L&IeV)0t(9~e&pz|F)~GN=HY z5D>3|CtyW>mEHTq{ItD%_Zm!qA4fUPIMyuaTI$wTV;$bQ4gRJW3kj`Dn8hN!9oe-2UCd z?$gih7DB{)*fQQZD>c1)xd;^uT-gznbj0Lv`uiglZI{-UHY2t`nNGgz+bt0$ znHWW7TmKq*oBT2$S-bi_$yX1xB6sz*VO-YjoHpFAI1z zSIkJj#C6kIaCU+hH8LQB6@ zf!9A8uX4bw3gUDF?bo}4LcQaeQ%sNik&w&+7zE& z*WLHxKc0i=v~|Y7>5+ay@ZGr^A%5A(&It<2TXMp5uMa81`Hi7z$b@@pk(u~Q)5f+I z_X8hko56-eNb9930}Ot!*~(AC-rAc(PqiZzmVh{^RkoNXf42-#2$UM6Wh1(O?5=5t z6a3$1NcT;v^p@nHBs}_q0iYpGQ0_4%T)rC^7Zi#ipYsK{%o}-5qTCSd04<`OjtE$C z_<3Mh@#C;HTkx8}`ev~Bs2mm@xt?*H)c1E*YZuDTjTKwcRn@T0TwbuAWL^+e)xqYK z%7x+u_@sF|J`D}3=vT9k%8T$KmKVZ;)2PBpzdz$#$ zmpgq#`c>Y;e7x6~B;__+MwDNrn;7Cn`_;0q+zrTYSG@)PIW4r*@aLk@W_MV9Q4XU@ z8%%7cRM~d^Mru&cAbpQ*Jnl-3Edu?m7W;Aqcz~2@Uz@<+zqWo+!p-+iB4!;!f-HO? 
zCw7yDbXZN}Hr?dV9x`J$Zn*aA!uOtjHb;s_TuaRG3N9 z^-hW>X}E|v^tjeM-ZXYmI3K6v^w~reAxo*Ln<=m^7w~S;?D(A+&^a!DKWi6hv#-m|sui_F+B9um-A{IR)=R zvcmqW3ncM+63~Fe?O+Tp11Y6W^lq9xX=-DKH^yhZj578W4S9kz`aCY6Rg3YfQWQjVgZw)c79ZeGyZ>kJb?S>vE4A zJZ!-=s4ieMXf`gOR9U;NaS!IW@DLz>J-_FPzwr%7H2v$j0XElqrzZa{SW;`(wUdmT z8gG9gGlLdaq$-r`5}BXHDc?zZ7w%i0Eb_5a+!NYIpa)KaB`RsN*}%LH-Ascu7xx zA+|inKE8T+A54BXCbB}XqHn#op8(~1$j2ZpsJyv$`?}WFvifzrsE5%Gt@Hv=V1;ur z#{ZX70CBk(9l?|h;ZNc_wC<3f^k&yga%#5!vmfuL-A)ye7uba?q^V)c7CE2?O)bn& zRnxkpCq+Ly9mKjbhFeMye}VIi5W6zXP3Fo?rj>+>C`JZE&rjY|^wsVq)UNA<&+<49 z8JR)$kVa(Lz3ps~jEXi$uiZY<8at&dKYGxPE1Rna;5cbSUNrm`a*NT}TM!Dm>pS^t zo9XIvh;WT=+Nr8Hx4bZN$#e42jTS(JOjh-25V2>9eh%7-Gu;u{$CN% z5l033%sNxbF%6*E9|D#prT2wRCj&=l+7rX3l(4WliF|hIPEmdQwdoXd${ZXbUn7yR@JYJ0i7|{~0!`*MxZ8@6vAb6d6Na za2RiSH7r)(vIHB-me~Ks1$>~c=riyArP7(jrpm{BPibYNS6ePGfM>{(8S?aJm+6PA zAcCUN9g^3?fSf?7AGbX79Ve&*tpNR6pzt_A zXqENXhciUH*er8b!@6kK+&W7?KZD`*>)M7pPUW9-O1+9^Ds=I|5w7>dv$4v;_mtok zN7+Y^gdC;Gu<%DQM-V6ij+5b3_Yu{_css<%7Z|~}*K6~!@2jaIn2KI2?g^F|yl_KV zIAo>I1x8`+DNGjN4@4!`avNfd%?kbTcHnqQCWNe0q0J9C zZm-FS7T#j#nG|4Oizx@lVHv%7nXSHh$?3|nV5Eh5n>`GAS`QTe@W#%a1RZa-)DAB7 z%5&=qtIP&J{0%5LCM;8*T_ z-5UzOD74zH8R1^(SWEJS-}V%)Orw-Z(kO)c^k~jA>og#gKVa}bT+||uk>&(2?IoI#W>Wnl`=2ER3oXG`XxFA3L}hx@<--ahNg@KEi`;J0??5-=RG+OTQjJ2Rn+MJw08;7kat9s}Wnp(p437G2ow#H4}1cHKBVYdeF$yK-k#6;R=1{%pHdm)U&DB;a73YEMSMgP z({hLl3s#UP`?I=$9Wf{pvwJ3--yf$Nsz%Y1xCNrT$2(%+(LpD2z(np%GWT35zQ*Nv zaR0+dWN?ZhiaINHBp9diI#TIG~b0n*- zpdAR$SiW*!$fYdyH<{& zpDwj_{;x%=$-vz{QE?l_uBo~N?&+2ZiJcVic~fo1A$PIC^Guhs#()oeJgyUwZ3>jh zgdQ=Ygnw?3^x7!zz=Yqu?qz#FZuC`;zRRvI&m#hpA`5H#Vk-II ztB3hRUo}nJydz4B*>Jy+)-0asJzHW>F00}klKviThr}gs|dEF z9kaJSz!=(cutmqIc1kd=gLG^+so6hO!4JF`B}$)1mrCWrbUU@o#tDd1Lav?4g#*%^ z<@AJmh(4Lftyt0u)qv7yIaOH z?6i7`^JJJa!dcvE^P<*Y$MJJ=M0%+(s#Y68)u+D)I=BWx??~SrdlfbPcMc%d=A~Dg z*-6LWCGf-TOVLOlhvu1)ns$STY-s=pVhlJ}2NVi5PA~z`2FiX?6gaj~?#{ZqFXkv& ze68}fx6OGWyC%|9UaX-9)HLG-4OivR$oTkv{V+>jpPrXptB_&=2@_lIp@{x~=}Hr+ zD+kJKq*3Zufy0dMYNAtz4o)U9T~$m3^E{{HB)uHLIqKF>p$uACXw{2a8#PdXF*_OO z1Y%1k61R34YW(BVwj>U1PmYL9r8xK#*G#Xze>6c1mYRrRmmzUqX|AeThvmZKi~T^Z zNM`l+|5WdSr8cIHJ~B3gn_tXBmBM48p9H<=k&g#?xY-lnl4(!SU(?Iv0Ld!N$= zX|6reh34b7kiNf&ZaO)MqypBjtrA?Q6Q65K#Xj@B!OL$9OmnLIP8f&4?R{YR=qmd% zphib46l5OSwc#vJSn)arUi0FG>^}0I*v-?PGm}JezNq-4+js06^R}iR4nx$Mq_5=8 zJAU$ztwmyiD+X*1yjdyj+UNcJJPack&9sYylKD%vjZwW2H+8dcWd{M9W zS;^5(!Wh5pcT{pZ4}YbIHEgs@eyQdAOx6BNrw|D4dkL3)oU|~EXt@OfZATX@U?;~k zQg^<^LnR#5Pybq!dH{~+bY!Xh`bKsPhg?p1b|xugyYNe8rNRX%P#(iKC8RpP4SfLg z58>RO;sIq?cGl3LSX>=^wE*TN@t(2!t&k4GMp;G|RaB$a;_|kvBasint69xCW+lhG z>iM-P@9Okj5S;%~9$q<%ny})g{1VIzR%JoUYL1dwT^%a!D~x~(8yK~U^vez>V;dHCX6vA}TxW<~6F2;^ zp(631VaYhq#j8LcRZ)I@s~$)zTDb+l#9?oLQ~BJ4jbv9?%})3%$Z3Pg-tUy!V}*@(zyMinfT-Y~5u~$54!H_pYI5>038NyniuS$CFy-^o%K+W|uamQ~ zz31X>GV+*qPt+M@0wya<%4w3D(Wj0(WP5|AD>X=GPicWzO<&z+D4)}<0#ZOy0+h+AM^Rv$hdLW3BgghZG;i>wK!f!sl*%P$d9AyMkpN!@cd-TayQXxM6qKKL z_ItG}EwNE%xHFNg2(N}Yt;=WpygbK+pZS84wzgDHW=L*dq`mzBb2k>l3JH5Ak)@>T ztVS_D5kp#OPy6N!@i~Gc;>#<|=hQ|*U%;=Pk!Nb~V?s7mAw8k@F@@X#6yR?`F$ZOf z@obQmDdoieXbSGFAdwkM?eW(!<*$&wmIdke3~^o7Pa!)lD<*SoD3O1Xq1DR(iY zS;NXd4IDr7C%X^)q>)da?n>4?^df~2y>6hMOw*C?RB~cSq3yn4??<9P&hC|d9gBF@ zDYXs%P1LWrhUDjxGJ#Wn7fG&0Fikm>7cQBEI(=JaB_q?4o45c`W`&hLGv?jqHBa$p ztdVO;e3l1CPv8%gP}=7Y#9KeMyzbBS(G3-YH&cp33K0O$ITur_1XOG=L2J&C&9)SJ z&M{#t0|^2~TU04U^t`@7ImrQJf7QCoE^r|SibZLGwNawI;>ARrLl?L96&^jj`vfvS+cXGrT$2=RPQ=YlUhN%N_>q9n5jTDV_A8f!x#1`Q$6MgE4x*SQEf$4z zv1Z~0C7vc_ej{CNUd^S=)Jq-N=^Zw{DW&um(wMpuyZu{<`>HRwKQF1u&?O$5qmfSY zTB>zi*+%$mbkuUtmm5`YxFbl|dsM45KBSxytNxNWiPcT1Bpg$q%ypUmYUg#H^>w>A z%!fnCsN(PC|Cf_Ox^W`zS$;3~c-pqlV1UC>*gX|qdsi[s]})}}}return 
Object.freeze(Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}))}(function(){const t=document.createElement("link").relList;if(t&&t.supports&&t.supports("modulepreload"))return;for(const s of document.querySelectorAll('link[rel="modulepreload"]'))i(s);new MutationObserver(s=>{for(const r of s)if(r.type==="childList")for(const n of r.addedNodes)n.tagName==="LINK"&&n.rel==="modulepreload"&&i(n)}).observe(document,{childList:!0,subtree:!0});function a(s){const r={};return s.integrity&&(r.integrity=s.integrity),s.referrerPolicy&&(r.referrerPolicy=s.referrerPolicy),s.crossOrigin==="use-credentials"?r.credentials="include":s.crossOrigin==="anonymous"?r.credentials="omit":r.credentials="same-origin",r}function i(s){if(s.ep)return;s.ep=!0;const r=a(s);fetch(s.href,r)}})();function hm(e){return e&&e.__esModule&&Object.prototype.hasOwnProperty.call(e,"default")?e.default:e}function g_(e){if(e.__esModule)return e;var t=e.default;if(typeof t=="function"){var a=function i(){return this instanceof i?Reflect.construct(t,arguments,this.constructor):t.apply(this,arguments)};a.prototype=t.prototype}else a={};return Object.defineProperty(a,"__esModule",{value:!0}),Object.keys(e).forEach(function(i){var s=Object.getOwnPropertyDescriptor(e,i);Object.defineProperty(a,i,s.get?s:{enumerable:!0,get:function(){return e[i]}})}),a}var gm={exports:{}},ao={},ym={exports:{}},J={};/** + * @license React + * react.production.min.js + * + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */var Tr=Symbol.for("react.element"),y_=Symbol.for("react.portal"),__=Symbol.for("react.fragment"),v_=Symbol.for("react.strict_mode"),b_=Symbol.for("react.profiler"),w_=Symbol.for("react.provider"),T_=Symbol.for("react.context"),x_=Symbol.for("react.forward_ref"),q_=Symbol.for("react.suspense"),D_=Symbol.for("react.memo"),k_=Symbol.for("react.lazy"),$c=Symbol.iterator;function S_(e){return e===null||typeof e!="object"?null:(e=$c&&e[$c]||e["@@iterator"],typeof e=="function"?e:null)}var _m={isMounted:function(){return!1},enqueueForceUpdate:function(){},enqueueReplaceState:function(){},enqueueSetState:function(){}},vm=Object.assign,bm={};function ps(e,t,a){this.props=e,this.context=t,this.refs=bm,this.updater=a||_m}ps.prototype.isReactComponent={};ps.prototype.setState=function(e,t){if(typeof e!="object"&&typeof e!="function"&&e!=null)throw Error("setState(...): takes an object of state variables to update or a function which returns an object of state variables.");this.updater.enqueueSetState(this,e,t,"setState")};ps.prototype.forceUpdate=function(e){this.updater.enqueueForceUpdate(this,e,"forceUpdate")};function wm(){}wm.prototype=ps.prototype;function fd(e,t,a){this.props=e,this.context=t,this.refs=bm,this.updater=a||_m}var md=fd.prototype=new wm;md.constructor=fd;vm(md,ps.prototype);md.isPureReactComponent=!0;var Wc=Array.isArray,Tm=Object.prototype.hasOwnProperty,hd={current:null},xm={key:!0,ref:!0,__self:!0,__source:!0};function qm(e,t,a){var i,s={},r=null,n=null;if(t!=null)for(i in t.ref!==void 0&&(n=t.ref),t.key!==void 0&&(r=""+t.key),t)Tm.call(t,i)&&!xm.hasOwnProperty(i)&&(s[i]=t[i]);var o=arguments.length-2;if(o===1)s.children=a;else if(1>>1,X=C[K];if(0>>1;Ks(xe,L))res(rt,xe)?(C[K]=rt,C[re]=L,K=re):(C[K]=xe,C[ue]=L,K=ue);else if(res(rt,L))C[K]=rt,C[re]=L,K=re;else break e}}return I}function s(C,I){var L=C.sortIndex-I.sortIndex;return L!==0?L:C.id-I.id}if(typeof 
performance=="object"&&typeof performance.now=="function"){var r=performance;e.unstable_now=function(){return r.now()}}else{var n=Date,o=n.now();e.unstable_now=function(){return n.now()-o}}var l=[],u=[],d=1,f=null,g=3,c=!1,m=!1,p=!1,q=typeof setTimeout=="function"?setTimeout:null,y=typeof clearTimeout=="function"?clearTimeout:null,h=typeof setImmediate<"u"?setImmediate:null;typeof navigator<"u"&&navigator.scheduling!==void 0&&navigator.scheduling.isInputPending!==void 0&&navigator.scheduling.isInputPending.bind(navigator.scheduling);function _(C){for(var I=a(u);I!==null;){if(I.callback===null)i(u);else if(I.startTime<=C)i(u),I.sortIndex=I.expirationTime,t(l,I);else break;I=a(u)}}function w(C){if(p=!1,_(C),!m)if(a(l)!==null)m=!0,M(x);else{var I=a(u);I!==null&&N(w,I.startTime-C)}}function x(C,I){m=!1,p&&(p=!1,y(S),S=-1),c=!0;var L=g;try{for(_(I),f=a(l);f!==null&&(!(f.expirationTime>I)||C&&!j());){var K=f.callback;if(typeof K=="function"){f.callback=null,g=f.priorityLevel;var X=K(f.expirationTime<=I);I=e.unstable_now(),typeof X=="function"?f.callback=X:f===a(l)&&i(l),_(I)}else i(l);f=a(l)}if(f!==null)var Ae=!0;else{var ue=a(u);ue!==null&&N(w,ue.startTime-I),Ae=!1}return Ae}finally{f=null,g=L,c=!1}}var k=!1,T=null,S=-1,A=5,P=-1;function j(){return!(e.unstable_now()-PC||125K?(C.sortIndex=L,t(u,C),a(l)===null&&C===a(u)&&(p?(y(S),S=-1):p=!0,N(w,L-K))):(C.sortIndex=X,t(l,C),m||c||(m=!0,M(x))),C},e.unstable_shouldYield=j,e.unstable_wrapCallback=function(C){var I=g;return function(){var L=g;g=I;try{return C.apply(this,arguments)}finally{g=L}}}})(Cm);Sm.exports=Cm;var M_=Sm.exports;/** + * @license React + * react-dom.production.min.js + * + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */var Pm=b,vt=M_;function E(e){for(var t="https://reactjs.org/docs/error-decoder.html?invariant="+e,a=1;a"u"||typeof window.document>"u"||typeof window.document.createElement>"u"),Ql=Object.prototype.hasOwnProperty,O_=/^[:A-Z_a-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD][:A-Z_a-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\-.0-9\u00B7\u0300-\u036F\u203F-\u2040]*$/,Hc={},Vc={};function B_(e){return Ql.call(Vc,e)?!0:Ql.call(Hc,e)?!1:O_.test(e)?Vc[e]=!0:(Hc[e]=!0,!1)}function z_(e,t,a,i){if(a!==null&&a.type===0)return!1;switch(typeof t){case"function":case"symbol":return!0;case"boolean":return i?!1:a!==null?!a.acceptsBooleans:(e=e.toLowerCase().slice(0,5),e!=="data-"&&e!=="aria-");default:return!1}}function $_(e,t,a,i){if(t===null||typeof t>"u"||z_(e,t,a,i))return!0;if(i)return!1;if(a!==null)switch(a.type){case 3:return!t;case 4:return t===!1;case 5:return isNaN(t);case 6:return isNaN(t)||1>t}return!1}function st(e,t,a,i,s,r,n){this.acceptsBooleans=t===2||t===3||t===4,this.attributeName=i,this.attributeNamespace=s,this.mustUseProperty=a,this.propertyName=e,this.type=t,this.sanitizeURL=r,this.removeEmptyString=n}var Ge={};"children dangerouslySetInnerHTML defaultValue defaultChecked innerHTML suppressContentEditableWarning suppressHydrationWarning style".split(" ").forEach(function(e){Ge[e]=new st(e,0,!1,e,null,!1,!1)});[["acceptCharset","accept-charset"],["className","class"],["htmlFor","for"],["httpEquiv","http-equiv"]].forEach(function(e){var t=e[0];Ge[t]=new st(t,1,!1,e[1],null,!1,!1)});["contentEditable","draggable","spellCheck","value"].forEach(function(e){Ge[e]=new st(e,2,!1,e.toLowerCase(),null,!1,!1)});["autoReverse","externalResourcesRequired","focusable","preserveAlpha"].forEach(function(e){Ge[e]=new st(e,2,!1,e,null,!1,!1)});"allowFullScreen async autoFocus autoPlay controls default defer disabled disablePictureInPicture disableRemotePlayback formNoValidate hidden loop noModule noValidate open playsInline readOnly required reversed scoped seamless itemScope".split(" ").forEach(function(e){Ge[e]=new st(e,3,!1,e.toLowerCase(),null,!1,!1)});["checked","multiple","muted","selected"].forEach(function(e){Ge[e]=new st(e,3,!0,e,null,!1,!1)});["capture","download"].forEach(function(e){Ge[e]=new st(e,4,!1,e,null,!1,!1)});["cols","rows","size","span"].forEach(function(e){Ge[e]=new st(e,6,!1,e,null,!1,!1)});["rowSpan","start"].forEach(function(e){Ge[e]=new st(e,5,!1,e.toLowerCase(),null,!1,!1)});var yd=/[\-:]([a-z])/g;function _d(e){return e[1].toUpperCase()}"accent-height alignment-baseline arabic-form baseline-shift cap-height clip-path clip-rule color-interpolation color-interpolation-filters color-profile color-rendering dominant-baseline enable-background fill-opacity fill-rule flood-color flood-opacity font-family font-size font-size-adjust font-stretch font-style font-variant font-weight glyph-name glyph-orientation-horizontal glyph-orientation-vertical horiz-adv-x horiz-origin-x image-rendering letter-spacing lighting-color marker-end marker-mid marker-start overline-position overline-thickness paint-order panose-1 pointer-events rendering-intent shape-rendering stop-color stop-opacity strikethrough-position strikethrough-thickness stroke-dasharray stroke-dashoffset stroke-linecap stroke-linejoin stroke-miterlimit stroke-opacity stroke-width text-anchor text-decoration text-rendering 
underline-position underline-thickness unicode-bidi unicode-range units-per-em v-alphabetic v-hanging v-ideographic v-mathematical vector-effect vert-adv-y vert-origin-x vert-origin-y word-spacing writing-mode xmlns:xlink x-height".split(" ").forEach(function(e){var t=e.replace(yd,_d);Ge[t]=new st(t,1,!1,e,null,!1,!1)});"xlink:actuate xlink:arcrole xlink:role xlink:show xlink:title xlink:type".split(" ").forEach(function(e){var t=e.replace(yd,_d);Ge[t]=new st(t,1,!1,e,"http://www.w3.org/1999/xlink",!1,!1)});["xml:base","xml:lang","xml:space"].forEach(function(e){var t=e.replace(yd,_d);Ge[t]=new st(t,1,!1,e,"http://www.w3.org/XML/1998/namespace",!1,!1)});["tabIndex","crossOrigin"].forEach(function(e){Ge[e]=new st(e,1,!1,e.toLowerCase(),null,!1,!1)});Ge.xlinkHref=new st("xlinkHref",1,!1,"xlink:href","http://www.w3.org/1999/xlink",!0,!1);["src","href","action","formAction"].forEach(function(e){Ge[e]=new st(e,1,!1,e.toLowerCase(),null,!0,!0)});function vd(e,t,a,i){var s=Ge.hasOwnProperty(t)?Ge[t]:null;(s!==null?s.type!==0:i||!(2o||s[n]!==r[o]){var l=` +`+s[n].replace(" at new "," at ");return e.displayName&&l.includes("")&&(l=l.replace("",e.displayName)),l}while(1<=n&&0<=o);break}}}finally{cl=!1,Error.prepareStackTrace=a}return(e=e?e.displayName||e.name:"")?Fs(e):""}function W_(e){switch(e.tag){case 5:return Fs(e.type);case 16:return Fs("Lazy");case 13:return Fs("Suspense");case 19:return Fs("SuspenseList");case 0:case 2:case 15:return e=pl(e.type,!1),e;case 11:return e=pl(e.type.render,!1),e;case 1:return e=pl(e.type,!0),e;default:return""}}function eu(e){if(e==null)return null;if(typeof e=="function")return e.displayName||e.name||null;if(typeof e=="string")return e;switch(e){case Ni:return"Fragment";case Fi:return"Portal";case Jl:return"Profiler";case bd:return"StrictMode";case Yl:return"Suspense";case Zl:return"SuspenseList"}if(typeof e=="object")switch(e.$$typeof){case Rm:return(e.displayName||"Context")+".Consumer";case Im:return(e._context.displayName||"Context")+".Provider";case wd:var t=e.render;return e=e.displayName,e||(e=t.displayName||t.name||"",e=e!==""?"ForwardRef("+e+")":"ForwardRef"),e;case Td:return t=e.displayName||null,t!==null?t:eu(e.type)||"Memo";case Sa:t=e._payload,e=e._init;try{return eu(e(t))}catch{}}return null}function U_(e){var t=e.type;switch(e.tag){case 24:return"Cache";case 9:return(t.displayName||"Context")+".Consumer";case 10:return(t._context.displayName||"Context")+".Provider";case 18:return"DehydratedFragment";case 11:return e=t.render,e=e.displayName||e.name||"",t.displayName||(e!==""?"ForwardRef("+e+")":"ForwardRef");case 7:return"Fragment";case 5:return t;case 4:return"Portal";case 3:return"Root";case 6:return"Text";case 16:return eu(t);case 8:return t===bd?"StrictMode":"Mode";case 22:return"Offscreen";case 12:return"Profiler";case 21:return"Scope";case 13:return"Suspense";case 19:return"SuspenseList";case 25:return"TracingMarker";case 1:case 0:case 17:case 2:case 14:case 15:if(typeof t=="function")return t.displayName||t.name||null;if(typeof t=="string")return t}return null}function Wa(e){switch(typeof e){case"boolean":case"number":case"string":case"undefined":return e;case"object":return e;default:return""}}function jm(e){var t=e.type;return(e=e.nodeName)&&e.toLowerCase()==="input"&&(t==="checkbox"||t==="radio")}function H_(e){var t=jm(e)?"checked":"value",a=Object.getOwnPropertyDescriptor(e.constructor.prototype,t),i=""+e[t];if(!e.hasOwnProperty(t)&&typeof a<"u"&&typeof a.get=="function"&&typeof a.set=="function"){var s=a.get,r=a.set;return 
Object.defineProperty(e,t,{configurable:!0,get:function(){return s.call(this)},set:function(n){i=""+n,r.call(this,n)}}),Object.defineProperty(e,t,{enumerable:a.enumerable}),{getValue:function(){return i},setValue:function(n){i=""+n},stopTracking:function(){e._valueTracker=null,delete e[t]}}}}function Lr(e){e._valueTracker||(e._valueTracker=H_(e))}function Fm(e){if(!e)return!1;var t=e._valueTracker;if(!t)return!0;var a=t.getValue(),i="";return e&&(i=jm(e)?e.checked?"true":"false":e.value),e=i,e!==a?(t.setValue(e),!0):!1}function Tn(e){if(e=e||(typeof document<"u"?document:void 0),typeof e>"u")return null;try{return e.activeElement||e.body}catch{return e.body}}function tu(e,t){var a=t.checked;return we({},t,{defaultChecked:void 0,defaultValue:void 0,value:void 0,checked:a??e._wrapperState.initialChecked})}function Gc(e,t){var a=t.defaultValue==null?"":t.defaultValue,i=t.checked!=null?t.checked:t.defaultChecked;a=Wa(t.value!=null?t.value:a),e._wrapperState={initialChecked:i,initialValue:a,controlled:t.type==="checkbox"||t.type==="radio"?t.checked!=null:t.value!=null}}function Nm(e,t){t=t.checked,t!=null&&vd(e,"checked",t,!1)}function au(e,t){Nm(e,t);var a=Wa(t.value),i=t.type;if(a!=null)i==="number"?(a===0&&e.value===""||e.value!=a)&&(e.value=""+a):e.value!==""+a&&(e.value=""+a);else if(i==="submit"||i==="reset"){e.removeAttribute("value");return}t.hasOwnProperty("value")?iu(e,t.type,a):t.hasOwnProperty("defaultValue")&&iu(e,t.type,Wa(t.defaultValue)),t.checked==null&&t.defaultChecked!=null&&(e.defaultChecked=!!t.defaultChecked)}function Xc(e,t,a){if(t.hasOwnProperty("value")||t.hasOwnProperty("defaultValue")){var i=t.type;if(!(i!=="submit"&&i!=="reset"||t.value!==void 0&&t.value!==null))return;t=""+e._wrapperState.initialValue,a||t===e.value||(e.value=t),e.defaultValue=t}a=e.name,a!==""&&(e.name=""),e.defaultChecked=!!e._wrapperState.initialChecked,a!==""&&(e.name=a)}function iu(e,t,a){(t!=="number"||Tn(e.ownerDocument)!==e)&&(a==null?e.defaultValue=""+e._wrapperState.initialValue:e.defaultValue!==""+a&&(e.defaultValue=""+a))}var Ns=Array.isArray;function Ki(e,t,a,i){if(e=e.options,t){t={};for(var s=0;s"+t.valueOf().toString()+"",t=Mr.firstChild;e.firstChild;)e.removeChild(e.firstChild);for(;t.firstChild;)e.appendChild(t.firstChild)}});function er(e,t){if(t){var a=e.firstChild;if(a&&a===e.lastChild&&a.nodeType===3){a.nodeValue=t;return}}e.textContent=t}var Bs={animationIterationCount:!0,aspectRatio:!0,borderImageOutset:!0,borderImageSlice:!0,borderImageWidth:!0,boxFlex:!0,boxFlexGroup:!0,boxOrdinalGroup:!0,columnCount:!0,columns:!0,flex:!0,flexGrow:!0,flexPositive:!0,flexShrink:!0,flexNegative:!0,flexOrder:!0,gridArea:!0,gridRow:!0,gridRowEnd:!0,gridRowSpan:!0,gridRowStart:!0,gridColumn:!0,gridColumnEnd:!0,gridColumnSpan:!0,gridColumnStart:!0,fontWeight:!0,lineClamp:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,tabSize:!0,widows:!0,zIndex:!0,zoom:!0,fillOpacity:!0,floodOpacity:!0,stopOpacity:!0,strokeDasharray:!0,strokeDashoffset:!0,strokeMiterlimit:!0,strokeOpacity:!0,strokeWidth:!0},V_=["Webkit","ms","Moz","O"];Object.keys(Bs).forEach(function(e){V_.forEach(function(t){t=t+e.charAt(0).toUpperCase()+e.substring(1),Bs[t]=Bs[e]})});function Bm(e,t,a){return t==null||typeof t=="boolean"||t===""?"":a||typeof t!="number"||t===0||Bs.hasOwnProperty(e)&&Bs[e]?(""+t).trim():t+"px"}function zm(e,t){e=e.style;for(var a in t)if(t.hasOwnProperty(a)){var i=a.indexOf("--")===0,s=Bm(a,t[a],i);a==="float"&&(a="cssFloat"),i?e.setProperty(a,s):e[a]=s}}var 
K_=we({menuitem:!0},{area:!0,base:!0,br:!0,col:!0,embed:!0,hr:!0,img:!0,input:!0,keygen:!0,link:!0,meta:!0,param:!0,source:!0,track:!0,wbr:!0});function nu(e,t){if(t){if(K_[e]&&(t.children!=null||t.dangerouslySetInnerHTML!=null))throw Error(E(137,e));if(t.dangerouslySetInnerHTML!=null){if(t.children!=null)throw Error(E(60));if(typeof t.dangerouslySetInnerHTML!="object"||!("__html"in t.dangerouslySetInnerHTML))throw Error(E(61))}if(t.style!=null&&typeof t.style!="object")throw Error(E(62))}}function ou(e,t){if(e.indexOf("-")===-1)return typeof t.is=="string";switch(e){case"annotation-xml":case"color-profile":case"font-face":case"font-face-src":case"font-face-uri":case"font-face-format":case"font-face-name":case"missing-glyph":return!1;default:return!0}}var lu=null;function xd(e){return e=e.target||e.srcElement||window,e.correspondingUseElement&&(e=e.correspondingUseElement),e.nodeType===3?e.parentNode:e}var uu=null,Gi=null,Xi=null;function Yc(e){if(e=Dr(e)){if(typeof uu!="function")throw Error(E(280));var t=e.stateNode;t&&(t=oo(t),uu(e.stateNode,e.type,t))}}function $m(e){Gi?Xi?Xi.push(e):Xi=[e]:Gi=e}function Wm(){if(Gi){var e=Gi,t=Xi;if(Xi=Gi=null,Yc(e),t)for(e=0;e>>=0,e===0?32:31-(sv(e)/rv|0)|0}var Or=64,Br=4194304;function Ls(e){switch(e&-e){case 1:return 1;case 2:return 2;case 4:return 4;case 8:return 8;case 16:return 16;case 32:return 32;case 64:case 128:case 256:case 512:case 1024:case 2048:case 4096:case 8192:case 16384:case 32768:case 65536:case 131072:case 262144:case 524288:case 1048576:case 2097152:return e&4194240;case 4194304:case 8388608:case 16777216:case 33554432:case 67108864:return e&130023424;case 134217728:return 134217728;case 268435456:return 268435456;case 536870912:return 536870912;case 1073741824:return 1073741824;default:return e}}function kn(e,t){var a=e.pendingLanes;if(a===0)return 0;var i=0,s=e.suspendedLanes,r=e.pingedLanes,n=a&268435455;if(n!==0){var o=n&~s;o!==0?i=Ls(o):(r&=n,r!==0&&(i=Ls(r)))}else n=a&~s,n!==0?i=Ls(n):r!==0&&(i=Ls(r));if(i===0)return 0;if(t!==0&&t!==i&&!(t&s)&&(s=i&-i,r=t&-t,s>=r||s===16&&(r&4194240)!==0))return t;if(i&4&&(i|=a&16),t=e.entangledLanes,t!==0)for(e=e.entanglements,t&=i;0a;a++)t.push(e);return t}function xr(e,t,a){e.pendingLanes|=t,t!==536870912&&(e.suspendedLanes=0,e.pingedLanes=0),e=e.eventTimes,t=31-Ut(t),e[t]=a}function uv(e,t){var a=e.pendingLanes&~t;e.pendingLanes=t,e.suspendedLanes=0,e.pingedLanes=0,e.expiredLanes&=t,e.mutableReadLanes&=t,e.entangledLanes&=t,t=e.entanglements;var i=e.eventTimes;for(e=e.expirationTimes;0=$s),op=String.fromCharCode(32),lp=!1;function uh(e,t){switch(e){case"keyup":return Lv.indexOf(t.keyCode)!==-1;case"keydown":return t.keyCode!==229;case"keypress":case"mousedown":case"focusout":return!0;default:return!1}}function dh(e){return e=e.detail,typeof e=="object"&&"data"in e?e.data:null}var Li=!1;function Ov(e,t){switch(e){case"compositionend":return dh(t);case"keypress":return t.which!==32?null:(lp=!0,op);case"textInput":return e=t.data,e===op&&lp?null:e;default:return null}}function Bv(e,t){if(Li)return e==="compositionend"||!Id&&uh(e,t)?(e=oh(),on=Cd=Ia=null,Li=!1,e):null;switch(e){case"paste":return null;case"keypress":if(!(t.ctrlKey||t.altKey||t.metaKey)||t.ctrlKey&&t.altKey){if(t.char&&1=t)return{node:a,offset:t-e};e=i}e:{for(;a;){if(a.nextSibling){a=a.nextSibling;break e}a=a.parentNode}a=void 0}a=pp(a)}}function mh(e,t){return e&&t?e===t?!0:e&&e.nodeType===3?!1:t&&t.nodeType===3?mh(e,t.parentNode):"contains"in 
e?e.contains(t):e.compareDocumentPosition?!!(e.compareDocumentPosition(t)&16):!1:!1}function hh(){for(var e=window,t=Tn();t instanceof e.HTMLIFrameElement;){try{var a=typeof t.contentWindow.location.href=="string"}catch{a=!1}if(a)e=t.contentWindow;else break;t=Tn(e.document)}return t}function Rd(e){var t=e&&e.nodeName&&e.nodeName.toLowerCase();return t&&(t==="input"&&(e.type==="text"||e.type==="search"||e.type==="tel"||e.type==="url"||e.type==="password")||t==="textarea"||e.contentEditable==="true")}function Xv(e){var t=hh(),a=e.focusedElem,i=e.selectionRange;if(t!==a&&a&&a.ownerDocument&&mh(a.ownerDocument.documentElement,a)){if(i!==null&&Rd(a)){if(t=i.start,e=i.end,e===void 0&&(e=t),"selectionStart"in a)a.selectionStart=t,a.selectionEnd=Math.min(e,a.value.length);else if(e=(t=a.ownerDocument||document)&&t.defaultView||window,e.getSelection){e=e.getSelection();var s=a.textContent.length,r=Math.min(i.start,s);i=i.end===void 0?r:Math.min(i.end,s),!e.extend&&r>i&&(s=i,i=r,r=s),s=fp(a,r);var n=fp(a,i);s&&n&&(e.rangeCount!==1||e.anchorNode!==s.node||e.anchorOffset!==s.offset||e.focusNode!==n.node||e.focusOffset!==n.offset)&&(t=t.createRange(),t.setStart(s.node,s.offset),e.removeAllRanges(),r>i?(e.addRange(t),e.extend(n.node,n.offset)):(t.setEnd(n.node,n.offset),e.addRange(t)))}}for(t=[],e=a;e=e.parentNode;)e.nodeType===1&&t.push({element:e,left:e.scrollLeft,top:e.scrollTop});for(typeof a.focus=="function"&&a.focus(),a=0;a=document.documentMode,Mi=null,hu=null,Us=null,gu=!1;function mp(e,t,a){var i=a.window===a?a.document:a.nodeType===9?a:a.ownerDocument;gu||Mi==null||Mi!==Tn(i)||(i=Mi,"selectionStart"in i&&Rd(i)?i={start:i.selectionStart,end:i.selectionEnd}:(i=(i.ownerDocument&&i.ownerDocument.defaultView||window).getSelection(),i={anchorNode:i.anchorNode,anchorOffset:i.anchorOffset,focusNode:i.focusNode,focusOffset:i.focusOffset}),Us&&nr(Us,i)||(Us=i,i=Pn(hu,"onSelect"),0zi||(e.current=Tu[zi],Tu[zi]=null,zi--)}function ce(e,t){zi++,Tu[zi]=e.current,e.current=t}var Ua={},et=Ga(Ua),lt=Ga(!1),gi=Ua;function as(e,t){var a=e.type.contextTypes;if(!a)return Ua;var i=e.stateNode;if(i&&i.__reactInternalMemoizedUnmaskedChildContext===t)return i.__reactInternalMemoizedMaskedChildContext;var s={},r;for(r in a)s[r]=t[r];return i&&(e=e.stateNode,e.__reactInternalMemoizedUnmaskedChildContext=t,e.__reactInternalMemoizedMaskedChildContext=s),s}function ut(e){return e=e.childContextTypes,e!=null}function In(){fe(lt),fe(et)}function wp(e,t,a){if(et.current!==Ua)throw Error(E(168));ce(et,t),ce(lt,a)}function qh(e,t,a){var i=e.stateNode;if(t=t.childContextTypes,typeof i.getChildContext!="function")return a;i=i.getChildContext();for(var s in i)if(!(s in t))throw Error(E(108,U_(e)||"Unknown",s));return we({},a,i)}function Rn(e){return e=(e=e.stateNode)&&e.__reactInternalMemoizedMergedChildContext||Ua,gi=et.current,ce(et,e),ce(lt,lt.current),!0}function Tp(e,t,a){var i=e.stateNode;if(!i)throw Error(E(169));a?(e=qh(e,t,gi),i.__reactInternalMemoizedMergedChildContext=e,fe(lt),fe(et),ce(et,e)):fe(lt),ce(lt,a)}var ua=null,lo=!1,kl=!1;function Dh(e){ua===null?ua=[e]:ua.push(e)}function ob(e){lo=!0,Dh(e)}function Xa(){if(!kl&&ua!==null){kl=!0;var e=0,t=ie;try{var a=ua;for(ie=1;e>=n,s-=n,da=1<<32-Ut(t)+s|a<S?(A=T,T=null):A=T.sibling;var P=g(y,T,_[S],w);if(P===null){T===null&&(T=A);break}e&&T&&P.alternate===null&&t(y,T),h=r(P,h,S),k===null?x=P:k.sibling=P,k=P,T=A}if(S===_.length)return a(y,T),he&&ti(y,S),x;if(T===null){for(;S<_.length;S++)T=f(y,_[S],w),T!==null&&(h=r(T,h,S),k===null?x=T:k.sibling=T,k=T);return 
he&&ti(y,S),x}for(T=i(y,T);S<_.length;S++)A=c(T,y,S,_[S],w),A!==null&&(e&&A.alternate!==null&&T.delete(A.key===null?S:A.key),h=r(A,h,S),k===null?x=A:k.sibling=A,k=A);return e&&T.forEach(function(j){return t(y,j)}),he&&ti(y,S),x}function p(y,h,_,w){var x=vs(_);if(typeof x!="function")throw Error(E(150));if(_=x.call(_),_==null)throw Error(E(151));for(var k=x=null,T=h,S=h=0,A=null,P=_.next();T!==null&&!P.done;S++,P=_.next()){T.index>S?(A=T,T=null):A=T.sibling;var j=g(y,T,P.value,w);if(j===null){T===null&&(T=A);break}e&&T&&j.alternate===null&&t(y,T),h=r(j,h,S),k===null?x=j:k.sibling=j,k=j,T=A}if(P.done)return a(y,T),he&&ti(y,S),x;if(T===null){for(;!P.done;S++,P=_.next())P=f(y,P.value,w),P!==null&&(h=r(P,h,S),k===null?x=P:k.sibling=P,k=P);return he&&ti(y,S),x}for(T=i(y,T);!P.done;S++,P=_.next())P=c(T,y,S,P.value,w),P!==null&&(e&&P.alternate!==null&&T.delete(P.key===null?S:P.key),h=r(P,h,S),k===null?x=P:k.sibling=P,k=P);return e&&T.forEach(function(O){return t(y,O)}),he&&ti(y,S),x}function q(y,h,_,w){if(typeof _=="object"&&_!==null&&_.type===Ni&&_.key===null&&(_=_.props.children),typeof _=="object"&&_!==null){switch(_.$$typeof){case Nr:e:{for(var x=_.key,k=h;k!==null;){if(k.key===x){if(x=_.type,x===Ni){if(k.tag===7){a(y,k.sibling),h=s(k,_.props.children),h.return=y,y=h;break e}}else if(k.elementType===x||typeof x=="object"&&x!==null&&x.$$typeof===Sa&&Pp(x)===k.type){a(y,k.sibling),h=s(k,_.props),h.ref=qs(y,k,_),h.return=y,y=h;break e}a(y,k);break}else t(y,k);k=k.sibling}_.type===Ni?(h=hi(_.props.children,y.mode,w,_.key),h.return=y,y=h):(w=hn(_.type,_.key,_.props,null,y.mode,w),w.ref=qs(y,h,_),w.return=y,y=w)}return n(y);case Fi:e:{for(k=_.key;h!==null;){if(h.key===k)if(h.tag===4&&h.stateNode.containerInfo===_.containerInfo&&h.stateNode.implementation===_.implementation){a(y,h.sibling),h=s(h,_.children||[]),h.return=y,y=h;break e}else{a(y,h);break}else t(y,h);h=h.sibling}h=jl(_,y.mode,w),h.return=y,y=h}return n(y);case Sa:return k=_._init,q(y,h,k(_._payload),w)}if(Ns(_))return m(y,h,_,w);if(vs(_))return p(y,h,_,w);Kr(y,_)}return typeof _=="string"&&_!==""||typeof _=="number"?(_=""+_,h!==null&&h.tag===6?(a(y,h.sibling),h=s(h,_),h.return=y,y=h):(a(y,h),h=El(_,y.mode,w),h.return=y,y=h),n(y)):a(y,h)}return q}var ss=Eh(!0),jh=Eh(!1),kr={},ta=Ga(kr),dr=Ga(kr),cr=Ga(kr);function di(e){if(e===kr)throw Error(E(174));return e}function zd(e,t){switch(ce(cr,t),ce(dr,e),ce(ta,kr),e=t.nodeType,e){case 9:case 11:t=(t=t.documentElement)?t.namespaceURI:ru(null,"");break;default:e=e===8?t.parentNode:t,t=e.namespaceURI||null,e=e.tagName,t=ru(t,e)}fe(ta),ce(ta,t)}function rs(){fe(ta),fe(dr),fe(cr)}function Fh(e){di(cr.current);var t=di(ta.current),a=ru(t,e.type);t!==a&&(ce(dr,e),ce(ta,a))}function $d(e){dr.current===e&&(fe(ta),fe(dr))}var _e=Ga(0);function Mn(e){for(var t=e;t!==null;){if(t.tag===13){var a=t.memoizedState;if(a!==null&&(a=a.dehydrated,a===null||a.data==="$?"||a.data==="$!"))return t}else if(t.tag===19&&t.memoizedProps.revealOrder!==void 0){if(t.flags&128)return t}else if(t.child!==null){t.child.return=t,t=t.child;continue}if(t===e)break;for(;t.sibling===null;){if(t.return===null||t.return===e)return null;t=t.return}t.sibling.return=t.return,t=t.sibling}return null}var Sl=[];function Wd(){for(var e=0;ea?a:4,e(!0);var i=Cl.transition;Cl.transition={};try{e(!1),t()}finally{ie=a,Cl.transition=i}}function Jh(){return jt().memoizedState}function cb(e,t,a){var i=Ba(e);if(a={lane:i,action:a,hasEagerState:!1,eagerState:null,next:null},Yh(e))Zh(t,a);else if(a=Ph(e,t,a,i),a!==null){var 
s=at();Ht(a,e,i,s),eg(a,t,i)}}function pb(e,t,a){var i=Ba(e),s={lane:i,action:a,hasEagerState:!1,eagerState:null,next:null};if(Yh(e))Zh(t,s);else{var r=e.alternate;if(e.lanes===0&&(r===null||r.lanes===0)&&(r=t.lastRenderedReducer,r!==null))try{var n=t.lastRenderedState,o=r(n,a);if(s.hasEagerState=!0,s.eagerState=o,Vt(o,n)){var l=t.interleaved;l===null?(s.next=s,Od(t)):(s.next=l.next,l.next=s),t.interleaved=s;return}}catch{}finally{}a=Ph(e,t,s,i),a!==null&&(s=at(),Ht(a,e,i,s),eg(a,t,i))}}function Yh(e){var t=e.alternate;return e===ve||t!==null&&t===ve}function Zh(e,t){Hs=On=!0;var a=e.pending;a===null?t.next=t:(t.next=a.next,a.next=t),e.pending=t}function eg(e,t,a){if(a&4194240){var i=t.lanes;i&=e.pendingLanes,a|=i,t.lanes=a,Dd(e,a)}}var Bn={readContext:Et,useCallback:Qe,useContext:Qe,useEffect:Qe,useImperativeHandle:Qe,useInsertionEffect:Qe,useLayoutEffect:Qe,useMemo:Qe,useReducer:Qe,useRef:Qe,useState:Qe,useDebugValue:Qe,useDeferredValue:Qe,useTransition:Qe,useMutableSource:Qe,useSyncExternalStore:Qe,useId:Qe,unstable_isNewReconciler:!1},fb={readContext:Et,useCallback:function(e,t){return Gt().memoizedState=[e,t===void 0?null:t],e},useContext:Et,useEffect:Ip,useImperativeHandle:function(e,t,a){return a=a!=null?a.concat([e]):null,cn(4194308,4,Vh.bind(null,t,e),a)},useLayoutEffect:function(e,t){return cn(4194308,4,e,t)},useInsertionEffect:function(e,t){return cn(4,2,e,t)},useMemo:function(e,t){var a=Gt();return t=t===void 0?null:t,e=e(),a.memoizedState=[e,t],e},useReducer:function(e,t,a){var i=Gt();return t=a!==void 0?a(t):t,i.memoizedState=i.baseState=t,e={pending:null,interleaved:null,lanes:0,dispatch:null,lastRenderedReducer:e,lastRenderedState:t},i.queue=e,e=e.dispatch=cb.bind(null,ve,e),[i.memoizedState,e]},useRef:function(e){var t=Gt();return e={current:e},t.memoizedState=e},useState:Ap,useDebugValue:Gd,useDeferredValue:function(e){return Gt().memoizedState=e},useTransition:function(){var e=Ap(!1),t=e[0];return e=db.bind(null,e[1]),Gt().memoizedState=e,[t,e]},useMutableSource:function(){},useSyncExternalStore:function(e,t,a){var i=ve,s=Gt();if(he){if(a===void 0)throw Error(E(407));a=a()}else{if(a=t(),$e===null)throw Error(E(349));_i&30||Mh(i,t,a)}s.memoizedState=a;var r={value:a,getSnapshot:t};return s.queue=r,Ip(Bh.bind(null,i,r,e),[e]),i.flags|=2048,mr(9,Oh.bind(null,i,r,a,t),void 0,null),a},useId:function(){var e=Gt(),t=$e.identifierPrefix;if(he){var a=ca,i=da;a=(i&~(1<<32-Ut(i)-1)).toString(32)+a,t=":"+t+"R"+a,a=pr++,0<\/script>",e=e.removeChild(e.firstChild)):typeof i.is=="string"?e=n.createElement(a,{is:i.is}):(e=n.createElement(a),a==="select"&&(n=e,i.multiple?n.multiple=!0:i.size&&(n.size=i.size))):e=n.createElementNS(e,a),e[Jt]=t,e[ur]=i,ug(e,t,!1,!1),t.stateNode=e;e:{switch(n=ou(a,i),a){case"dialog":pe("cancel",e),pe("close",e),s=i;break;case"iframe":case"object":case"embed":pe("load",e),s=i;break;case"video":case"audio":for(s=0;sos&&(t.flags|=128,i=!0,Ds(r,!1),t.lanes=4194304)}else{if(!i)if(e=Mn(n),e!==null){if(t.flags|=128,i=!0,a=e.updateQueue,a!==null&&(t.updateQueue=a,t.flags|=4),Ds(r,!0),r.tail===null&&r.tailMode==="hidden"&&!n.alternate&&!he)return Je(t),null}else 2*Ce()-r.renderingStartTime>os&&a!==1073741824&&(t.flags|=128,i=!0,Ds(r,!1),t.lanes=4194304);r.isBackwards?(n.sibling=t.child,t.child=n):(a=r.last,a!==null?a.sibling=n:t.child=n,r.last=n)}return r.tail!==null?(t=r.tail,r.rendering=t,r.tail=t.sibling,r.renderingStartTime=Ce(),t.sibling=null,a=_e.current,ce(_e,i?a&1|2:a&1),t):(Je(t),null);case 22:case 23:return 
ec(),i=t.memoizedState!==null,e!==null&&e.memoizedState!==null!==i&&(t.flags|=8192),i&&t.mode&1?ft&1073741824&&(Je(t),t.subtreeFlags&6&&(t.flags|=8192)):Je(t),null;case 24:return null;case 25:return null}throw Error(E(156,t.tag))}function wb(e,t){switch(jd(t),t.tag){case 1:return ut(t.type)&&In(),e=t.flags,e&65536?(t.flags=e&-65537|128,t):null;case 3:return rs(),fe(lt),fe(et),Wd(),e=t.flags,e&65536&&!(e&128)?(t.flags=e&-65537|128,t):null;case 5:return $d(t),null;case 13:if(fe(_e),e=t.memoizedState,e!==null&&e.dehydrated!==null){if(t.alternate===null)throw Error(E(340));is()}return e=t.flags,e&65536?(t.flags=e&-65537|128,t):null;case 19:return fe(_e),null;case 4:return rs(),null;case 10:return Md(t.type._context),null;case 22:case 23:return ec(),null;case 24:return null;default:return null}}var Xr=!1,Ze=!1,Tb=typeof WeakSet=="function"?WeakSet:Set,B=null;function Hi(e,t){var a=e.ref;if(a!==null)if(typeof a=="function")try{a(null)}catch(i){Se(e,t,i)}else a.current=null}function ju(e,t,a){try{a()}catch(i){Se(e,t,i)}}var Bp=!1;function xb(e,t){if(yu=Sn,e=hh(),Rd(e)){if("selectionStart"in e)var a={start:e.selectionStart,end:e.selectionEnd};else e:{a=(a=e.ownerDocument)&&a.defaultView||window;var i=a.getSelection&&a.getSelection();if(i&&i.rangeCount!==0){a=i.anchorNode;var s=i.anchorOffset,r=i.focusNode;i=i.focusOffset;try{a.nodeType,r.nodeType}catch{a=null;break e}var n=0,o=-1,l=-1,u=0,d=0,f=e,g=null;t:for(;;){for(var c;f!==a||s!==0&&f.nodeType!==3||(o=n+s),f!==r||i!==0&&f.nodeType!==3||(l=n+i),f.nodeType===3&&(n+=f.nodeValue.length),(c=f.firstChild)!==null;)g=f,f=c;for(;;){if(f===e)break t;if(g===a&&++u===s&&(o=n),g===r&&++d===i&&(l=n),(c=f.nextSibling)!==null)break;f=g,g=f.parentNode}f=c}a=o===-1||l===-1?null:{start:o,end:l}}else a=null}a=a||{start:0,end:0}}else a=null;for(_u={focusedElem:e,selectionRange:a},Sn=!1,B=t;B!==null;)if(t=B,e=t.child,(t.subtreeFlags&1028)!==0&&e!==null)e.return=t,B=e;else for(;B!==null;){t=B;try{var m=t.alternate;if(t.flags&1024)switch(t.tag){case 0:case 11:case 15:break;case 1:if(m!==null){var p=m.memoizedProps,q=m.memoizedState,y=t.stateNode,h=y.getSnapshotBeforeUpdate(t.elementType===t.type?p:Bt(t.type,p),q);y.__reactInternalSnapshotBeforeUpdate=h}break;case 3:var _=t.stateNode.containerInfo;_.nodeType===1?_.textContent="":_.nodeType===9&&_.documentElement&&_.removeChild(_.documentElement);break;case 5:case 6:case 4:case 17:break;default:throw Error(E(163))}}catch(w){Se(t,t.return,w)}if(e=t.sibling,e!==null){e.return=t.return,B=e;break}B=t.return}return m=Bp,Bp=!1,m}function Vs(e,t,a){var i=t.updateQueue;if(i=i!==null?i.lastEffect:null,i!==null){var s=i=i.next;do{if((s.tag&e)===e){var r=s.destroy;s.destroy=void 0,r!==void 0&&ju(t,a,r)}s=s.next}while(s!==i)}}function po(e,t){if(t=t.updateQueue,t=t!==null?t.lastEffect:null,t!==null){var a=t=t.next;do{if((a.tag&e)===e){var i=a.create;a.destroy=i()}a=a.next}while(a!==t)}}function Fu(e){var t=e.ref;if(t!==null){var a=e.stateNode;switch(e.tag){case 5:e=a;break;default:e=a}typeof t=="function"?t(e):t.current=e}}function pg(e){var t=e.alternate;t!==null&&(e.alternate=null,pg(t)),e.child=null,e.deletions=null,e.sibling=null,e.tag===5&&(t=e.stateNode,t!==null&&(delete t[Jt],delete t[ur],delete t[wu],delete t[rb],delete t[nb])),e.stateNode=null,e.return=null,e.dependencies=null,e.memoizedProps=null,e.memoizedState=null,e.pendingProps=null,e.stateNode=null,e.updateQueue=null}function fg(e){return e.tag===5||e.tag===3||e.tag===4}function zp(e){e:for(;;){for(;e.sibling===null;){if(e.return===null||fg(e.return))return 
null;e=e.return}for(e.sibling.return=e.return,e=e.sibling;e.tag!==5&&e.tag!==6&&e.tag!==18;){if(e.flags&2||e.child===null||e.tag===4)continue e;e.child.return=e,e=e.child}if(!(e.flags&2))return e.stateNode}}function Nu(e,t,a){var i=e.tag;if(i===5||i===6)e=e.stateNode,t?a.nodeType===8?a.parentNode.insertBefore(e,t):a.insertBefore(e,t):(a.nodeType===8?(t=a.parentNode,t.insertBefore(e,a)):(t=a,t.appendChild(e)),a=a._reactRootContainer,a!=null||t.onclick!==null||(t.onclick=An));else if(i!==4&&(e=e.child,e!==null))for(Nu(e,t,a),e=e.sibling;e!==null;)Nu(e,t,a),e=e.sibling}function Lu(e,t,a){var i=e.tag;if(i===5||i===6)e=e.stateNode,t?a.insertBefore(e,t):a.appendChild(e);else if(i!==4&&(e=e.child,e!==null))for(Lu(e,t,a),e=e.sibling;e!==null;)Lu(e,t,a),e=e.sibling}var He=null,zt=!1;function xa(e,t,a){for(a=a.child;a!==null;)mg(e,t,a),a=a.sibling}function mg(e,t,a){if(ea&&typeof ea.onCommitFiberUnmount=="function")try{ea.onCommitFiberUnmount(io,a)}catch{}switch(a.tag){case 5:Ze||Hi(a,t);case 6:var i=He,s=zt;He=null,xa(e,t,a),He=i,zt=s,He!==null&&(zt?(e=He,a=a.stateNode,e.nodeType===8?e.parentNode.removeChild(a):e.removeChild(a)):He.removeChild(a.stateNode));break;case 18:He!==null&&(zt?(e=He,a=a.stateNode,e.nodeType===8?Dl(e.parentNode,a):e.nodeType===1&&Dl(e,a),sr(e)):Dl(He,a.stateNode));break;case 4:i=He,s=zt,He=a.stateNode.containerInfo,zt=!0,xa(e,t,a),He=i,zt=s;break;case 0:case 11:case 14:case 15:if(!Ze&&(i=a.updateQueue,i!==null&&(i=i.lastEffect,i!==null))){s=i=i.next;do{var r=s,n=r.destroy;r=r.tag,n!==void 0&&(r&2||r&4)&&ju(a,t,n),s=s.next}while(s!==i)}xa(e,t,a);break;case 1:if(!Ze&&(Hi(a,t),i=a.stateNode,typeof i.componentWillUnmount=="function"))try{i.props=a.memoizedProps,i.state=a.memoizedState,i.componentWillUnmount()}catch(o){Se(a,t,o)}xa(e,t,a);break;case 21:xa(e,t,a);break;case 22:a.mode&1?(Ze=(i=Ze)||a.memoizedState!==null,xa(e,t,a),Ze=i):xa(e,t,a);break;default:xa(e,t,a)}}function $p(e){var t=e.updateQueue;if(t!==null){e.updateQueue=null;var a=e.stateNode;a===null&&(a=e.stateNode=new Tb),t.forEach(function(i){var s=Rb.bind(null,e,i);a.has(i)||(a.add(i),i.then(s,s))})}}function Ot(e,t){var a=t.deletions;if(a!==null)for(var i=0;is&&(s=n),i&=~r}if(i=s,i=Ce()-i,i=(120>i?120:480>i?480:1080>i?1080:1920>i?1920:3e3>i?3e3:4320>i?4320:1960*Db(i/1960))-i,10e?16:e,Ra===null)var i=!1;else{if(e=Ra,Ra=null,Wn=0,Z&6)throw Error(E(331));var s=Z;for(Z|=4,B=e.current;B!==null;){var r=B,n=r.child;if(B.flags&16){var o=r.deletions;if(o!==null){for(var l=0;lCe()-Yd?mi(e,0):Jd|=a),dt(e,t)}function Tg(e,t){t===0&&(e.mode&1?(t=Br,Br<<=1,!(Br&130023424)&&(Br=4194304)):t=1);var a=at();e=ha(e,t),e!==null&&(xr(e,t,a),dt(e,a))}function Ib(e){var t=e.memoizedState,a=0;t!==null&&(a=t.retryLane),Tg(e,a)}function Rb(e,t){var a=0;switch(e.tag){case 13:var i=e.stateNode,s=e.memoizedState;s!==null&&(a=s.retryLane);break;case 19:i=e.stateNode;break;default:throw Error(E(314))}i!==null&&i.delete(t),Tg(e,a)}var xg;xg=function(e,t,a){if(e!==null)if(e.memoizedProps!==t.pendingProps||lt.current)ot=!0;else{if(!(e.lanes&a)&&!(t.flags&128))return ot=!1,vb(e,t,a);ot=!!(e.flags&131072)}else ot=!1,he&&t.flags&1048576&&kh(t,jn,t.index);switch(t.lanes=0,t.tag){case 2:var i=t.type;pn(e,t),e=t.pendingProps;var s=as(t,et.current);Ji(t,a),s=Hd(null,t,i,e,s,a);var r=Vd();return t.flags|=1,typeof s=="object"&&s!==null&&typeof s.render=="function"&&s.$$typeof===void 0?(t.tag=1,t.memoizedState=null,t.updateQueue=null,ut(i)?(r=!0,Rn(t)):r=!1,t.memoizedState=s.state!==null&&s.state!==void 
0?s.state:null,Bd(t),s.updater=uo,t.stateNode=s,s._reactInternals=t,Su(t,i,e,a),t=Au(null,t,i,!0,r,a)):(t.tag=0,he&&r&&Ed(t),tt(null,t,s,a),t=t.child),t;case 16:i=t.elementType;e:{switch(pn(e,t),e=t.pendingProps,s=i._init,i=s(i._payload),t.type=i,s=t.tag=jb(i),e=Bt(i,e),s){case 0:t=Pu(null,t,i,e,a);break e;case 1:t=Lp(null,t,i,e,a);break e;case 11:t=Fp(null,t,i,e,a);break e;case 14:t=Np(null,t,i,Bt(i.type,e),a);break e}throw Error(E(306,i,""))}return t;case 0:return i=t.type,s=t.pendingProps,s=t.elementType===i?s:Bt(i,s),Pu(e,t,i,s,a);case 1:return i=t.type,s=t.pendingProps,s=t.elementType===i?s:Bt(i,s),Lp(e,t,i,s,a);case 3:e:{if(ng(t),e===null)throw Error(E(387));i=t.pendingProps,r=t.memoizedState,s=r.element,Ah(e,t),Ln(t,i,null,a);var n=t.memoizedState;if(i=n.element,r.isDehydrated)if(r={element:i,isDehydrated:!1,cache:n.cache,pendingSuspenseBoundaries:n.pendingSuspenseBoundaries,transitions:n.transitions},t.updateQueue.baseState=r,t.memoizedState=r,t.flags&256){s=ns(Error(E(423)),t),t=Mp(e,t,i,a,s);break e}else if(i!==s){s=ns(Error(E(424)),t),t=Mp(e,t,i,a,s);break e}else for(gt=La(t.stateNode.containerInfo.firstChild),yt=t,he=!0,$t=null,a=jh(t,null,i,a),t.child=a;a;)a.flags=a.flags&-3|4096,a=a.sibling;else{if(is(),i===s){t=ga(e,t,a);break e}tt(e,t,i,a)}t=t.child}return t;case 5:return Fh(t),e===null&&qu(t),i=t.type,s=t.pendingProps,r=e!==null?e.memoizedProps:null,n=s.children,vu(i,s)?n=null:r!==null&&vu(i,r)&&(t.flags|=32),rg(e,t),tt(e,t,n,a),t.child;case 6:return e===null&&qu(t),null;case 13:return og(e,t,a);case 4:return zd(t,t.stateNode.containerInfo),i=t.pendingProps,e===null?t.child=ss(t,null,i,a):tt(e,t,i,a),t.child;case 11:return i=t.type,s=t.pendingProps,s=t.elementType===i?s:Bt(i,s),Fp(e,t,i,s,a);case 7:return tt(e,t,t.pendingProps,a),t.child;case 8:return tt(e,t,t.pendingProps.children,a),t.child;case 12:return tt(e,t,t.pendingProps.children,a),t.child;case 10:e:{if(i=t.type._context,s=t.pendingProps,r=t.memoizedProps,n=s.value,ce(Fn,i._currentValue),i._currentValue=n,r!==null)if(Vt(r.value,n)){if(r.children===s.children&&!lt.current){t=ga(e,t,a);break e}}else for(r=t.child,r!==null&&(r.return=t);r!==null;){var o=r.dependencies;if(o!==null){n=r.child;for(var l=o.firstContext;l!==null;){if(l.context===i){if(r.tag===1){l=pa(-1,a&-a),l.tag=2;var u=r.updateQueue;if(u!==null){u=u.shared;var d=u.pending;d===null?l.next=l:(l.next=d.next,d.next=l),u.pending=l}}r.lanes|=a,l=r.alternate,l!==null&&(l.lanes|=a),Du(r.return,a,t),o.lanes|=a;break}l=l.next}}else if(r.tag===10)n=r.type===t.type?null:r.child;else if(r.tag===18){if(n=r.return,n===null)throw Error(E(341));n.lanes|=a,o=n.alternate,o!==null&&(o.lanes|=a),Du(n,a,t),n=r.sibling}else n=r.child;if(n!==null)n.return=r;else for(n=r;n!==null;){if(n===t){n=null;break}if(r=n.sibling,r!==null){r.return=n.return,n=r;break}n=n.return}r=n}tt(e,t,s.children,a),t=t.child}return t;case 9:return s=t.type,i=t.pendingProps.children,Ji(t,a),s=Et(s),i=i(s),t.flags|=1,tt(e,t,i,a),t.child;case 14:return i=t.type,s=Bt(i,t.pendingProps),s=Bt(i.type,s),Np(e,t,i,s,a);case 15:return ig(e,t,t.type,t.pendingProps,a);case 17:return i=t.type,s=t.pendingProps,s=t.elementType===i?s:Bt(i,s),pn(e,t),t.tag=1,ut(i)?(e=!0,Rn(t)):e=!1,Ji(t,a),Rh(t,i,s),Su(t,i,s,a),Au(null,t,i,!0,e,a);case 19:return lg(e,t,a);case 22:return sg(e,t,a)}throw Error(E(156,t.tag))};function qg(e,t){return Qm(e,t)}function 
Eb(e,t,a,i){this.tag=e,this.key=a,this.sibling=this.child=this.return=this.stateNode=this.type=this.elementType=null,this.index=0,this.ref=null,this.pendingProps=t,this.dependencies=this.memoizedState=this.updateQueue=this.memoizedProps=null,this.mode=i,this.subtreeFlags=this.flags=0,this.deletions=null,this.childLanes=this.lanes=0,this.alternate=null}function At(e,t,a,i){return new Eb(e,t,a,i)}function ac(e){return e=e.prototype,!(!e||!e.isReactComponent)}function jb(e){if(typeof e=="function")return ac(e)?1:0;if(e!=null){if(e=e.$$typeof,e===wd)return 11;if(e===Td)return 14}return 2}function za(e,t){var a=e.alternate;return a===null?(a=At(e.tag,t,e.key,e.mode),a.elementType=e.elementType,a.type=e.type,a.stateNode=e.stateNode,a.alternate=e,e.alternate=a):(a.pendingProps=t,a.type=e.type,a.flags=0,a.subtreeFlags=0,a.deletions=null),a.flags=e.flags&14680064,a.childLanes=e.childLanes,a.lanes=e.lanes,a.child=e.child,a.memoizedProps=e.memoizedProps,a.memoizedState=e.memoizedState,a.updateQueue=e.updateQueue,t=e.dependencies,a.dependencies=t===null?null:{lanes:t.lanes,firstContext:t.firstContext},a.sibling=e.sibling,a.index=e.index,a.ref=e.ref,a}function hn(e,t,a,i,s,r){var n=2;if(i=e,typeof e=="function")ac(e)&&(n=1);else if(typeof e=="string")n=5;else e:switch(e){case Ni:return hi(a.children,s,r,t);case bd:n=8,s|=8;break;case Jl:return e=At(12,a,t,s|2),e.elementType=Jl,e.lanes=r,e;case Yl:return e=At(13,a,t,s),e.elementType=Yl,e.lanes=r,e;case Zl:return e=At(19,a,t,s),e.elementType=Zl,e.lanes=r,e;case Em:return mo(a,s,r,t);default:if(typeof e=="object"&&e!==null)switch(e.$$typeof){case Im:n=10;break e;case Rm:n=9;break e;case wd:n=11;break e;case Td:n=14;break e;case Sa:n=16,i=null;break e}throw Error(E(130,e==null?e:typeof e,""))}return t=At(n,a,t,s),t.elementType=e,t.type=i,t.lanes=r,t}function hi(e,t,a,i){return e=At(7,e,i,t),e.lanes=a,e}function mo(e,t,a,i){return e=At(22,e,i,t),e.elementType=Em,e.lanes=a,e.stateNode={isHidden:!1},e}function El(e,t,a){return e=At(6,e,null,t),e.lanes=a,e}function jl(e,t,a){return t=At(4,e.children!==null?e.children:[],e.key,t),t.lanes=a,t.stateNode={containerInfo:e.containerInfo,pendingChildren:null,implementation:e.implementation},t}function Fb(e,t,a,i,s){this.tag=t,this.containerInfo=e,this.finishedWork=this.pingCache=this.current=this.pendingChildren=null,this.timeoutHandle=-1,this.callbackNode=this.pendingContext=this.context=null,this.callbackPriority=0,this.eventTimes=ml(0),this.expirationTimes=ml(-1),this.entangledLanes=this.finishedLanes=this.mutableReadLanes=this.expiredLanes=this.pingedLanes=this.suspendedLanes=this.pendingLanes=0,this.entanglements=ml(0),this.identifierPrefix=i,this.onRecoverableError=s,this.mutableSourceEagerHydrationData=null}function ic(e,t,a,i,s,r,n,o,l){return e=new Fb(e,t,a,o,l),t===1?(t=1,r===!0&&(t|=8)):t=0,r=At(3,null,null,t),e.current=r,r.stateNode=e,r.memoizedState={element:i,isDehydrated:a,cache:null,transitions:null,pendingSuspenseBoundaries:null},Bd(r),e}function Nb(e,t,a){var i=3"u"||typeof __REACT_DEVTOOLS_GLOBAL_HOOK__.checkDCE!="function"))try{__REACT_DEVTOOLS_GLOBAL_HOOK__.checkDCE(Cg)}catch(e){console.error(e)}}Cg(),km.exports=bt;var vo=km.exports;const Yr=hm(vo);var Qp=vo;Xl.createRoot=Qp.createRoot,Xl.hydrateRoot=Qp.hydrateRoot;/** + * @remix-run/router v1.8.0 + * + * Copyright (c) Remix Software Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE.md file in the root directory of this source tree. 
+ * + * @license MIT + */function gr(){return gr=Object.assign?Object.assign.bind():function(e){for(var t=1;t"u")throw new Error(t)}function oc(e,t){if(!e){typeof console<"u"&&console.warn(t);try{throw new Error(t)}catch{}}}function $b(){return Math.random().toString(36).substr(2,8)}function Yp(e,t){return{usr:e.state,key:e.key,idx:t}}function $u(e,t,a,i){return a===void 0&&(a=null),gr({pathname:typeof e=="string"?e:e.pathname,search:"",hash:""},typeof t=="string"?hs(t):t,{state:a,key:t&&t.key||i||$b()})}function Vn(e){let{pathname:t="/",search:a="",hash:i=""}=e;return a&&a!=="?"&&(t+=a.charAt(0)==="?"?a:"?"+a),i&&i!=="#"&&(t+=i.charAt(0)==="#"?i:"#"+i),t}function hs(e){let t={};if(e){let a=e.indexOf("#");a>=0&&(t.hash=e.substr(a),e=e.substr(0,a));let i=e.indexOf("?");i>=0&&(t.search=e.substr(i),e=e.substr(0,i)),e&&(t.pathname=e)}return t}function Wb(e,t,a,i){i===void 0&&(i={});let{window:s=document.defaultView,v5Compat:r=!1}=i,n=s.history,o=Ea.Pop,l=null,u=d();u==null&&(u=0,n.replaceState(gr({},n.state,{idx:u}),""));function d(){return(n.state||{idx:null}).idx}function f(){o=Ea.Pop;let q=d(),y=q==null?null:q-u;u=q,l&&l({action:o,location:p.location,delta:y})}function g(q,y){o=Ea.Push;let h=$u(p.location,q,y);a&&a(h,q),u=d()+1;let _=Yp(h,u),w=p.createHref(h);try{n.pushState(_,"",w)}catch(x){if(x instanceof DOMException&&x.name==="DataCloneError")throw x;s.location.assign(w)}r&&l&&l({action:o,location:p.location,delta:1})}function c(q,y){o=Ea.Replace;let h=$u(p.location,q,y);a&&a(h,q),u=d();let _=Yp(h,u),w=p.createHref(h);n.replaceState(_,"",w),r&&l&&l({action:o,location:p.location,delta:0})}function m(q){let y=s.location.origin!=="null"?s.location.origin:s.location.href,h=typeof q=="string"?q:Vn(q);return je(y,"No window.location.(origin|href) available to create URL for href: "+h),new URL(h,y)}let p={get action(){return o},get location(){return e(s,n)},listen(q){if(l)throw new Error("A history only accepts one active listener");return s.addEventListener(Jp,f),l=q,()=>{s.removeEventListener(Jp,f),l=null}},createHref(q){return t(s,q)},createURL:m,encodeLocation(q){let y=m(q);return{pathname:y.pathname,search:y.search,hash:y.hash}},push:g,replace:c,go(q){return n.go(q)}};return p}var Zp;(function(e){e.data="data",e.deferred="deferred",e.redirect="redirect",e.error="error"})(Zp||(Zp={}));function Ub(e,t,a){a===void 0&&(a="/");let i=typeof t=="string"?hs(t):t,s=lc(i.pathname||"/",a);if(s==null)return null;let r=Pg(e);Hb(r);let n=null;for(let o=0;n==null&&o{let l={relativePath:o===void 0?r.path||"":o,caseSensitive:r.caseSensitive===!0,childrenIndex:n,route:r};l.relativePath.startsWith("/")&&(je(l.relativePath.startsWith(i),'Absolute route path "'+l.relativePath+'" nested under path '+('"'+i+'" is not valid. An absolute child route path ')+"must start with the combined path of all its parent routes."),l.relativePath=l.relativePath.slice(i.length));let u=$a([i,l.relativePath]),d=a.concat(l);r.children&&r.children.length>0&&(je(r.index!==!0,"Index routes must not have child routes. 
Please remove "+('all child routes from route path "'+u+'".')),Pg(r.children,t,d,u)),!(r.path==null&&!r.index)&&t.push({path:u,score:Yb(u,r.index),routesMeta:d})};return e.forEach((r,n)=>{var o;if(r.path===""||!((o=r.path)!=null&&o.includes("?")))s(r,n);else for(let l of Ag(r.path))s(r,n,l)}),t}function Ag(e){let t=e.split("/");if(t.length===0)return[];let[a,...i]=t,s=a.endsWith("?"),r=a.replace(/\?$/,"");if(i.length===0)return s?[r,""]:[r];let n=Ag(i.join("/")),o=[];return o.push(...n.map(l=>l===""?r:[r,l].join("/"))),s&&o.push(...n),o.map(l=>e.startsWith("/")&&l===""?"/":l)}function Hb(e){e.sort((t,a)=>t.score!==a.score?a.score-t.score:Zb(t.routesMeta.map(i=>i.childrenIndex),a.routesMeta.map(i=>i.childrenIndex)))}const Vb=/^:\w+$/,Kb=3,Gb=2,Xb=1,Qb=10,Jb=-2,ef=e=>e==="*";function Yb(e,t){let a=e.split("/"),i=a.length;return a.some(ef)&&(i+=Jb),t&&(i+=Gb),a.filter(s=>!ef(s)).reduce((s,r)=>s+(Vb.test(r)?Kb:r===""?Xb:Qb),i)}function Zb(e,t){return e.length===t.length&&e.slice(0,-1).every((i,s)=>i===t[s])?e[e.length-1]-t[t.length-1]:0}function ew(e,t){let{routesMeta:a}=e,i={},s="/",r=[];for(let n=0;n{if(d==="*"){let g=o[f]||"";n=r.slice(0,r.length-g.length).replace(/(.)\/+$/,"$1")}return u[d]=sw(o[f]||"",d),u},{}),pathname:r,pathnameBase:n,pattern:e}}function aw(e,t,a){t===void 0&&(t=!1),a===void 0&&(a=!0),oc(e==="*"||!e.endsWith("*")||e.endsWith("/*"),'Route path "'+e+'" will be treated as if it were '+('"'+e.replace(/\*$/,"/*")+'" because the `*` character must ')+"always follow a `/` in the pattern. To get rid of this warning, "+('please change the route path to "'+e.replace(/\*$/,"/*")+'".'));let i=[],s="^"+e.replace(/\/*\*?$/,"").replace(/^\/*/,"/").replace(/[\\.*+^$?{}|()[\]]/g,"\\$&").replace(/\/:(\w+)/g,(n,o)=>(i.push(o),"/([^\\/]+)"));return e.endsWith("*")?(i.push("*"),s+=e==="*"||e==="/*"?"(.*)$":"(?:\\/(.+)|\\/*)$"):a?s+="\\/*$":e!==""&&e!=="/"&&(s+="(?:(?=\\/|$))"),[new RegExp(s,t?void 0:"i"),i]}function iw(e){try{return decodeURI(e)}catch(t){return oc(!1,'The URL path "'+e+'" could not be decoded because it is is a malformed URL segment. This is probably due to a bad percent '+("encoding ("+t+").")),e}}function sw(e,t){try{return decodeURIComponent(e)}catch(a){return oc(!1,'The value for the URL param "'+t+'" will not be decoded because'+(' the string "'+e+'" is a malformed URL segment. This is probably')+(" due to a bad percent encoding ("+a+").")),e}}function lc(e,t){if(t==="/")return e;if(!e.toLowerCase().startsWith(t.toLowerCase()))return null;let a=t.endsWith("/")?t.length-1:t.length,i=e.charAt(a);return i&&i!=="/"?null:e.slice(a)||"/"}function rw(e,t){t===void 0&&(t="/");let{pathname:a,search:i="",hash:s=""}=typeof e=="string"?hs(e):e;return{pathname:a?a.startsWith("/")?a:nw(a,t):t,search:lw(i),hash:uw(s)}}function nw(e,t){let a=t.replace(/\/+$/,"").split("/");return e.split("/").forEach(s=>{s===".."?a.length>1&&a.pop():s!=="."&&a.push(s)}),a.length>1?a.join("/"):"/"}function Fl(e,t,a,i){return"Cannot include a '"+e+"' character in a manually specified "+("`to."+t+"` field ["+JSON.stringify(i)+"]. Please separate it out to the ")+("`to."+a+"` field. 
Alternatively you may provide the full path as ")+'a string in and the router will parse it for you.'}function Ig(e){return e.filter((t,a)=>a===0||t.route.path&&t.route.path.length>0)}function Rg(e,t,a,i){i===void 0&&(i=!1);let s;typeof e=="string"?s=hs(e):(s=gr({},e),je(!s.pathname||!s.pathname.includes("?"),Fl("?","pathname","search",s)),je(!s.pathname||!s.pathname.includes("#"),Fl("#","pathname","hash",s)),je(!s.search||!s.search.includes("#"),Fl("#","search","hash",s)));let r=e===""||s.pathname==="",n=r?"/":s.pathname,o;if(i||n==null)o=a;else{let f=t.length-1;if(n.startsWith("..")){let g=n.split("/");for(;g[0]==="..";)g.shift(),f-=1;s.pathname=g.join("/")}o=f>=0?t[f]:"/"}let l=rw(s,o),u=n&&n!=="/"&&n.endsWith("/"),d=(r||n===".")&&a.endsWith("/");return!l.pathname.endsWith("/")&&(u||d)&&(l.pathname+="/"),l}const $a=e=>e.join("/").replace(/\/\/+/g,"/"),ow=e=>e.replace(/\/+$/,"").replace(/^\/*/,"/"),lw=e=>!e||e==="?"?"":e.startsWith("?")?e:"?"+e,uw=e=>!e||e==="#"?"":e.startsWith("#")?e:"#"+e;function dw(e){return e!=null&&typeof e.status=="number"&&typeof e.statusText=="string"&&typeof e.internal=="boolean"&&"data"in e}const Eg=["post","put","patch","delete"];new Set(Eg);const cw=["get",...Eg];new Set(cw);/** + * React Router v6.15.0 + * + * Copyright (c) Remix Software Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE.md file in the root directory of this source tree. + * + * @license MIT + */function Kn(){return Kn=Object.assign?Object.assign.bind():function(e){for(var t=1;tl.pathnameBase)),n=b.useRef(!1);return Fg(()=>{n.current=!0}),b.useCallback(function(l,u){if(u===void 0&&(u={}),!n.current)return;if(typeof l=="number"){a.go(l);return}let d=Rg(l,JSON.parse(r),s,u.relative==="path");e==null&&t!=="/"&&(d.pathname=d.pathname==="/"?t:$a([t,d.pathname])),(u.replace?a.replace:a.push)(d,u.state,u)},[t,a,r,s,e])}function gw(){let{matches:e}=b.useContext(Qa),t=e[e.length-1];return t?t.params:{}}function Ng(e,t){let{relative:a}=t===void 0?{}:t,{matches:i}=b.useContext(Qa),{pathname:s}=wo(),r=JSON.stringify(Ig(i).map(n=>n.pathnameBase));return b.useMemo(()=>Rg(e,JSON.parse(r),s,a==="path"),[e,r,s,a])}function yw(e,t){return _w(e,t)}function _w(e,t,a){Sr()||je(!1);let{navigator:i}=b.useContext(gs),{matches:s}=b.useContext(Qa),r=s[s.length-1],n=r?r.params:{};r&&r.pathname;let o=r?r.pathnameBase:"/";r&&r.route;let l=wo(),u;if(t){var d;let p=typeof t=="string"?hs(t):t;o==="/"||(d=p.pathname)!=null&&d.startsWith(o)||je(!1),u=p}else u=l;let f=u.pathname||"/",g=o==="/"?f:f.slice(o.length)||"/",c=Ub(e,{pathname:g}),m=xw(c&&c.map(p=>Object.assign({},p,{params:Object.assign({},n,p.params),pathname:$a([o,i.encodeLocation?i.encodeLocation(p.pathname).pathname:p.pathname]),pathnameBase:p.pathnameBase==="/"?o:$a([o,i.encodeLocation?i.encodeLocation(p.pathnameBase).pathname:p.pathnameBase])})),s,a);return t&&m?b.createElement(bo.Provider,{value:{location:Kn({pathname:"/",search:"",hash:"",state:null,key:"default"},u),navigationType:Ea.Pop}},m):m}function vw(){let e=Sw(),t=dw(e)?e.status+" "+e.statusText:e instanceof Error?e.message:JSON.stringify(e),a=e instanceof Error?e.stack:null,s={padding:"0.5rem",backgroundColor:"rgba(200,200,200, 0.5)"},r=null;return b.createElement(b.Fragment,null,b.createElement("h2",null,"Unexpected Application Error!"),b.createElement("h3",{style:{fontStyle:"italic"}},t),a?b.createElement("pre",{style:s},a):null,r)}const bw=b.createElement(vw,null);class ww extends 
b.Component{constructor(t){super(t),this.state={location:t.location,revalidation:t.revalidation,error:t.error}}static getDerivedStateFromError(t){return{error:t}}static getDerivedStateFromProps(t,a){return a.location!==t.location||a.revalidation!=="idle"&&t.revalidation==="idle"?{error:t.error,location:t.location,revalidation:t.revalidation}:{error:t.error||a.error,location:a.location,revalidation:t.revalidation||a.revalidation}}componentDidCatch(t,a){console.error("React Router caught the following error during render",t,a)}render(){return this.state.error?b.createElement(Qa.Provider,{value:this.props.routeContext},b.createElement(jg.Provider,{value:this.state.error,children:this.props.component})):this.props.children}}function Tw(e){let{routeContext:t,match:a,children:i}=e,s=b.useContext(uc);return s&&s.static&&s.staticContext&&(a.route.errorElement||a.route.ErrorBoundary)&&(s.staticContext._deepestRenderedBoundaryId=a.route.id),b.createElement(Qa.Provider,{value:t},i)}function xw(e,t,a){var i;if(t===void 0&&(t=[]),a===void 0&&(a=null),e==null){var s;if((s=a)!=null&&s.errors)e=a.matches;else return null}let r=e,n=(i=a)==null?void 0:i.errors;if(n!=null){let o=r.findIndex(l=>l.route.id&&(n==null?void 0:n[l.route.id]));o>=0||je(!1),r=r.slice(0,Math.min(r.length,o+1))}return r.reduceRight((o,l,u)=>{let d=l.route.id?n==null?void 0:n[l.route.id]:null,f=null;a&&(f=l.route.errorElement||bw);let g=t.concat(r.slice(0,u+1)),c=()=>{let m;return d?m=f:l.route.Component?m=b.createElement(l.route.Component,null):l.route.element?m=l.route.element:m=o,b.createElement(Tw,{match:l,routeContext:{outlet:o,matches:g,isDataRoute:a!=null},children:m})};return a&&(l.route.ErrorBoundary||l.route.errorElement||u===0)?b.createElement(ww,{location:a.location,revalidation:a.revalidation,component:f,error:d,children:c(),routeContext:{outlet:null,matches:g,isDataRoute:!0}}):c()},null)}var Lg=function(e){return e.UseBlocker="useBlocker",e.UseRevalidator="useRevalidator",e.UseNavigateStable="useNavigate",e}(Lg||{}),Gn=function(e){return e.UseBlocker="useBlocker",e.UseLoaderData="useLoaderData",e.UseActionData="useActionData",e.UseRouteError="useRouteError",e.UseNavigation="useNavigation",e.UseRouteLoaderData="useRouteLoaderData",e.UseMatches="useMatches",e.UseRevalidator="useRevalidator",e.UseNavigateStable="useNavigate",e.UseRouteId="useRouteId",e}(Gn||{});function qw(e){let t=b.useContext(uc);return t||je(!1),t}function Dw(e){let t=b.useContext(pw);return t||je(!1),t}function kw(e){let t=b.useContext(Qa);return t||je(!1),t}function Mg(e){let t=kw(),a=t.matches[t.matches.length-1];return a.route.id||je(!1),a.route.id}function Sw(){var e;let t=b.useContext(jg),a=Dw(Gn.UseRouteError),i=Mg(Gn.UseRouteError);return t||((e=a.errors)==null?void 0:e[i])}function Cw(){let{router:e}=qw(Lg.UseNavigateStable),t=Mg(Gn.UseNavigateStable),a=b.useRef(!1);return Fg(()=>{a.current=!0}),b.useCallback(function(s,r){r===void 0&&(r={}),a.current&&(typeof s=="number"?e.navigate(s):e.navigate(s,Kn({fromRouteId:t},r)))},[e,t])}function Wu(e){je(!1)}function Pw(e){let{basename:t="/",children:a=null,location:i,navigationType:s=Ea.Pop,navigator:r,static:n=!1}=e;Sr()&&je(!1);let o=t.replace(/^\/*/,"/"),l=b.useMemo(()=>({basename:o,navigator:r,static:n}),[o,r,n]);typeof i=="string"&&(i=hs(i));let{pathname:u="/",search:d="",hash:f="",state:g=null,key:c="default"}=i,m=b.useMemo(()=>{let p=lc(u,o);return p==null?null:{location:{pathname:p,search:d,hash:f,state:g,key:c},navigationType:s}},[o,u,d,f,g,c,s]);return 
m==null?null:b.createElement(gs.Provider,{value:l},b.createElement(bo.Provider,{children:a,value:m}))}function Aw(e){let{children:t,location:a}=e;return yw(Uu(t),a)}new Promise(()=>{});function Uu(e,t){t===void 0&&(t=[]);let a=[];return b.Children.forEach(e,(i,s)=>{if(!b.isValidElement(i))return;let r=[...t,s];if(i.type===b.Fragment){a.push.apply(a,Uu(i.props.children,r));return}i.type!==Wu&&je(!1),!i.props.index||!i.props.children||je(!1);let n={id:i.props.id||r.join("-"),caseSensitive:i.props.caseSensitive,element:i.props.element,Component:i.props.Component,index:i.props.index,path:i.props.path,loader:i.props.loader,action:i.props.action,errorElement:i.props.errorElement,ErrorBoundary:i.props.ErrorBoundary,hasErrorBoundary:i.props.ErrorBoundary!=null||i.props.errorElement!=null,shouldRevalidate:i.props.shouldRevalidate,handle:i.props.handle,lazy:i.props.lazy};i.props.children&&(n.children=Uu(i.props.children,r)),a.push(n)}),a}/** + * React Router DOM v6.15.0 + * + * Copyright (c) Remix Software Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE.md file in the root directory of this source tree. + * + * @license MIT + */function Hu(){return Hu=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0)&&(a[s]=e[s]);return a}function Rw(e){return!!(e.metaKey||e.altKey||e.ctrlKey||e.shiftKey)}function Ew(e,t){return e.button===0&&(!t||t==="_self")&&!Rw(e)}const jw=["onClick","relative","reloadDocument","replace","state","target","to","preventScrollReset"],Fw="startTransition",tf=wn[Fw];function Nw(e){let{basename:t,children:a,future:i,window:s}=e,r=b.useRef();r.current==null&&(r.current=zb({window:s,v5Compat:!0}));let n=r.current,[o,l]=b.useState({action:n.action,location:n.location}),{v7_startTransition:u}=i||{},d=b.useCallback(f=>{u&&tf?tf(()=>l(f)):l(f)},[l,u]);return b.useLayoutEffect(()=>n.listen(d),[n,d]),b.createElement(Pw,{basename:t,children:a,location:o.location,navigationType:o.action,navigator:n})}const Lw=typeof window<"u"&&typeof window.document<"u"&&typeof window.document.createElement<"u",Mw=/^(?:[a-z][a-z0-9+.-]*:|\/\/)/i,Xn=b.forwardRef(function(t,a){let{onClick:i,relative:s,reloadDocument:r,replace:n,state:o,target:l,to:u,preventScrollReset:d}=t,f=Iw(t,jw),{basename:g}=b.useContext(gs),c,m=!1;if(typeof u=="string"&&Mw.test(u)&&(c=u,Lw))try{let h=new URL(window.location.href),_=u.startsWith("//")?new URL(h.protocol+u):new URL(u),w=lc(_.pathname,g);_.origin===h.origin&&w!=null?u=w+_.search+_.hash:m=!0}catch{}let p=fw(u,{relative:s}),q=Ow(u,{replace:n,state:o,target:l,preventScrollReset:d,relative:s});function y(h){i&&i(h),h.defaultPrevented||q(h)}return b.createElement("a",Hu({},f,{href:c||p,onClick:m||r?i:y,ref:a,target:l}))});var af;(function(e){e.UseScrollRestoration="useScrollRestoration",e.UseSubmit="useSubmit",e.UseSubmitFetcher="useSubmitFetcher",e.UseFetcher="useFetcher"})(af||(af={}));var sf;(function(e){e.UseFetchers="useFetchers",e.UseScrollRestoration="useScrollRestoration"})(sf||(sf={}));function Ow(e,t){let{target:a,replace:i,state:s,preventScrollReset:r,relative:n}=t===void 0?{}:t,o=mw(),l=wo(),u=Ng(e,{relative:n});return b.useCallback(d=>{if(Ew(d,a)){d.preventDefault();let f=i!==void 0?i:Vn(l)===Vn(u);o(e,{replace:f,state:s,preventScrollReset:r,relative:n})}},[l,o,u,i,s,a,e,r,n])}const Bw="/aiida-registry/pr-preview/pr-254/assets/logo-white-text-16948862.svg",zw="/aiida-registry/pr-preview/pr-254/assets/MARVEL-32e738c9.png",$w="/aiida-registry/pr-preview/pr-254/assets/MaX-099f261c.png";const 
Ww={"aiida-QECpWorkChain":{code_home:"https://github.com/rikigigi/aiida-QECpWorkChain",development_status:"beta",entry_point_prefix:"qecpworkchain",pip_url:"git+https://github.com/rikigigi/aiida-QECpWorkChain",name:"aiida-QECpWorkChain",package_name:"aiida_QECpWorkChain",hosted_on:"github.com",metadata:{author:"Riccardo Bertossa",author_email:"rbertoss@sissa.it",version:"0.2.0a0",description:"Car-Parrinello Work Chain with Quantum Espresso. This workchain does a full CP simulation, from the choice of the electronic mass and the timestep, to the choice of the best parallelization options, and then it does the NPT equilibration and a final NVE simulation at the prescribed P and T. Automates as much as possible.",classifiers:["Programming Language :: Python","Intended Audience :: Science/Research","License :: OSI Approved :: GNU General Public License v3 (GPLv3)","Natural Language :: English","Framework :: AiiDA"]},aiida_version:">=2.0.0,<3.0.0",entry_points:{"aiida.workflows":{"qecpworkchain.cp":{description:["No description available"],spec:{inputs:[{name:"cp_code",required:!0,valid_types:"Code",info:""},{name:"cp_resources_cg_list",required:!0,valid_types:"List",info:"Same as cp_resources_cp_list but when doing a CG. The CG uses a different amount of resource and can use no band or task group parallelization."},{name:"cp_resources_cp_list",required:!0,valid_types:"List",info:`List of dictionary like the following: +{ + 'resources' : { + 'num_machines' : 2, + 'num_mpiprocs_per_machine' : 48, + }, + 'wallclock' : 3600, + 'queue' : 'queue_name', + 'account': 'account_name', +} +c,porturrently only the first element of the list is used. +'wallclock' is the maximum time that can be requested to the scheduler. This code can decide to ask for less. +`},{name:"ecutwfc",required:!0,valid_types:"Float",info:"wavefunction cutoff (Ry), like in the QE input"},{name:"pseudo_family",required:!0,valid_types:"Str",info:"pseudopotential family to use, as in usual aiida operations"},{name:"pw_code",required:!0,valid_types:"Code",info:"input pw code (used to calculate force ratio)"},{name:"pw_resources_list",required:!0,valid_types:"List",info:"Same as cp_resources_cp_list but for pw.x code."},{name:"structure",required:!0,valid_types:"StructureData, TrajectoryData",info:"Input structure. If a trajectory is given, the workchain will use its last step to start the CG. If velocities are present, they will be used to initialize the simulation. Note that if you use a trajectory, usually kind information (like mass) are not included, so default values will be used. If you want to include kind information or override those provided with the input structure, use the input structure_kinds"},{name:"thermobarostat_points",required:!0,valid_types:"List",info:'List of dicts, each with the format [ { "temperature_K": 1000, "pressure_KBar": 10 , "equilibration_time_ps": 5.0, "thermostat_time_ps": 5.0} ]. The simulation will loop over this list of dictionaries, in the same order, equilibrating for the specified time at the given P,T point. Every point is repeated if the average T and P are not within the specified ranges'},{name:"additional_parameters_cp",required:!1,valid_types:"Dict",info:"parameters that will be included in the settings input of the QE CP plugin. These settings will be added on top of the default one. 
Same format as plugin input"},{name:"adjust_ionic_mass",required:!1,valid_types:"Bool",info:"Multiply the mass of the ions by the corresponding force ration between the cp forces and pw forces -- that is less than 1. Note that averages of static properties do not depend on the ionic masses."},{name:"benchmark_emass_dt_walltime_s",required:!1,valid_types:"Float",info:"same as benchmark_parallel_walltime_s but for dermining the best electronic mass and timestep."},{name:"benchmark_parallel_walltime_s",required:!1,valid_types:"Float",info:"time requested to the scheduler during the test for finding the best parallelization parameters."},{name:"cmdline_cp",required:!1,valid_types:"List, NoneType",info:"additional command line parameters of the cp verlet caclulations only (for example parallelization options)"},{name:"default_nose_frequency",required:!1,valid_types:"Float",info:"default nose frequency when a frequency cannot be estimated from the vibrational spectrum"},{name:"dt",required:!1,valid_types:"Float, NoneType",info:"timestep in atomic units, if not automatically chosen."},{name:"dt_start_stop_step",required:!1,valid_types:"List",info:"list of timesteps to try. Timesteps are changed to better integrate the equation of motion. When a new electronic mass is selected by this workchain timesteps are automatically adjusted."},{name:"emass",required:!1,valid_types:"Float, NoneType",info:"electronic mass, atomic mass units, if not automatically chosen"},{name:"emass_list",required:!1,valid_types:"List",info:"list of electronic masses to try. The emass is selected in order to satisfy the requested CP/DFT force ratio."},{name:"initial_atomic_velocities_A_ps",required:!1,valid_types:"ArrayData, NoneType",info:"optional input initial velocities in angstrom over picoseconds"},{name:"max_slope_const",required:!1,valid_types:"Float",info:"max slope in K/ps of the constant of motion linear fit."},{name:"max_slope_ekinc",required:!1,valid_types:"Float",info:"max slope in K/ps of the ekinc linear fit. If not satisfied try to change emass"},{name:"max_slope_min_emass",required:!1,valid_types:"Float",info:"minimum possible value of electronic mass that can be set by the max_slope correction routine. Will not go lower than that."},{name:"max_slope_min_ps",required:!1,valid_types:"Float",info:"minimum required lenght in ps of the last trajectory to do the linear fit on ekinc and const of motion"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"min_traj_steps_vdos",required:!1,valid_types:"Int",info:"minimum number of steps to consider the calculated vibrational spectrum maximum valid, to set the thermostat frequency"},{name:"minimum_nose_frequency",required:!1,valid_types:"Float",info:"minimum nose frequency: if the frequency estimated from the vibrational spectrum is lower than this value, this value is used"},{name:"nstep_initial_cg",required:!1,valid_types:"Int",info:"At the beginning of the simulation the CP algorithm is not used. 
This is the number of steps to do using Born-Oppenheimer molecular dynamics algorithm with a conjugate gradient minimization of the electronic ground state."},{name:"nstep_parallel_test",required:!1,valid_types:"Int",info:"the benchmark simulations will be that long, if performed"},{name:"number_of_pw_per_trajectory",required:!1,valid_types:"Int",info:"Number of pw submitted for every trajectory during calculation of force ratio."},{name:"nve_required_picoseconds",required:!1,valid_types:"Float",info:"The equilibrated NVE simulation will last at least this number of picoseconds. How much picoseconds do you want?"},{name:"pressure_tolerance",required:!1,valid_types:"Float",info:"Pressure tolerance in kBar used to say if the npt is equilibrated. If not setted, use the standard deviation of the P time series"},{name:"skip_emass_dt_test",required:!1,valid_types:"Bool",info:""},{name:"skip_parallel_test",required:!1,valid_types:"Bool",info:"do not run run benchmarks to discover a good internal Quantum Espresso parallelization scheme for the current system"},{name:"skip_thermobarostat",required:!1,valid_types:"Bool",info:""},{name:"structure_kinds",required:!1,valid_types:"List, NoneType",info:'These kinds will be used to override or set the masses of the various atomic types. Note that the workflow, if skip_emass_dt_test is True, will calculate the ratio between cp forces and pw forces and adjust the provided masses automatically according to this ratio. So if you provide this input, make sure to set skip_emass_dt_test to True and set also the inputs emass and dt, or "bad things can happen"'},{name:"target_force_ratio",required:!1,valid_types:"Float",info:"The forces calculated by the Car-Parrinello method are affected by two types of error: one is due to the oscillations of the electrons around the DFT energy minimum, and the second is due to the finite mass of the electronic fluid that produces a _sistematic_ error in the forces, as if the electrons add mass to the ionic core. This second kind of error is can be controlled by this parameter, that tries to adjust the electronic mass to obtain the desidered ratio between CP forces and true DFT forces. Then you may want to modify the ionic mass to correct the leading factor of this error."},{name:"temperature_tolerance",required:!1,valid_types:"Float",info:"Temperature tolerance in K used to say if the npt is equilibrated. If not setted, use the standard deviation of the T time series"},{name:"tempw_initial_random",required:!1,valid_types:"Float, NoneType",info:"If provided, sets the initial temperature when randomly initializing the starting velocities."}],outputs:[{name:"cmdline_cp",required:!0,valid_types:"",info:""},{name:"dt",required:!0,valid_types:"",info:""},{name:"emass",required:!0,valid_types:"",info:""},{name:"full_traj",required:!0,valid_types:"",info:""},{name:"kinds",required:!0,valid_types:"",info:""},{name:"nve_prod_traj",required:!0,valid_types:"",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:401,message:"The initial cg steps failed. 
I cannot start to work."},{status:402,message:"Nose-Hoover thermostat failed."},{status:403,message:"Final cg after Nose-Hoover failed."},{status:404,message:"Error in the NVE simulation"},{status:405,message:"The simulations are calculating very expensive random numbers. There is something wrong (cutoff? metal? boo?)"},{status:406,message:"Wrong input parameters"},{status:407,message:"Parallel test was not succesful, maybe there is something more wrong."},{status:408,message:"Multiple errors in the simulation that cannot fix."},{status:409,message:"This is a bug in the workchain."}]},class:"aiida_QECpWorkChain.workflow:CpWorkChain"}}},commits_count:5,summaryinfo:[{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install git+https://github.com/rikigigi/aiida-QECpWorkChain",is_installable:"True"},"aiida-abinit":{code_home:"https://github.com/sponce24/aiida-abinit",entry_point_prefix:"abinit",pip_url:"aiida-abinit",plugin_info:"https://raw.github.com/sponce24/aiida-abinit/master/setup.json",name:"aiida-abinit",package_name:"aiida_abinit",hosted_on:"github.com",metadata:{description:"The AiiDA plugin for ABINIT.",author_email:"Samuel Ponce ",classifiers:["Development Status :: 4 - Beta","Framework :: AiiDA","License :: OSI Approved :: MIT License","Operating System :: MacOS :: MacOS X","Operating System :: POSIX :: Linux","Programming Language :: Python","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9"],version:"0.4.0"},aiida_version:">=1.6.3,<1.7.0",entry_points:{"aiida.calculations":{abinit:{description:["AiiDA calculation plugin wrapping the abinit executable."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"kpoints",required:!0,valid_types:"KpointsData",info:"The k-point mesh or path"},{name:"parameters",required:!0,valid_types:"Dict",info:"The ABINIT input parameters."},{name:"pseudos",required:!0,valid_types:"Psp8Data, JthXmlData",info:"The pseudopotentials."},{name:"structure",required:!0,valid_types:"StructureData",info:"The input structure."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"parent_calc_folder",required:!1,valid_types:"RemoteData",info:"A remote folder used for restarts."},{name:"settings",required:!1,valid_types:"Dict",info:"Various special settings."}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"Various output quantities."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"output_bands",required:!1,valid_types:"BandsData",info:"Final electronic bands if present."},{name:"output_structure",required:!1,valid_types:"StructureData",info:"Final structure of the calculation if present."},{name:"output_trajectory",required:!1,valid_types:"TrajectoryData",info:"Trajectory of various output quantities over the calculation if present."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"Calculation did not produce all expected output files."},{status:101,message:"Calculation did not produce the expected `[prefix]o_GSR.nc` output file."},{status:102,message:"Calculation did not produce the expected `[prefix]o_HIST.nc` output file."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:200,message:"The retrieved folder data node could not be accessed."},{status:210,message:"The retrieved folder did not contain the `stdout` output file."},{status:301,message:"The `stdout` output file could not be read."},{status:302,message:"The `stdout` output file could not be parsed."},{status:303,message:"The `abipy` `EventsParser` reports that the runw as not completed."},{status:304,message:"The output file contains one or more error messages."},{status:305,message:"The output file contains one or more warning messages."},{status:312,message:"The output structure could not be parsed."},{status:400,message:"The calculation stopped prematurely because it ran out of walltime."},{status:500,message:"The SCF minimization cycle did not converge."},{status:501,message:"The ionic minimization cycle did not converge."}]},class:"aiida_abinit.calculations:AbinitCalculation"}},"aiida.parsers":{abinit:"aiida_abinit.parsers:AbinitParser"},"aiida.workflows":{"abinit.base":{description:["Base Abinit Workchain to perform a DFT calculation. Validates parameters and restart."],spec:{inputs:[{name:"abinit",required:!0,valid_types:"",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"handler_overrides",required:!1,valid_types:"Dict",info:"Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated."},{name:"kpoints",required:!1,valid_types:"KpointsData",info:"An explicit k-points mesh or list. Either this or `kpoints_distance` must be provided."},{name:"kpoints_distance",required:!1,valid_types:"Float",info:"The minimum desired distance in 1/Å between k-points in reciprocal space. 
The explicit k-point mesh will be generated automatically by a calculation function based on the input structure."},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"Various output quantities."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"output_bands",required:!1,valid_types:"BandsData",info:"Final electronic bands if present."},{name:"output_structure",required:!1,valid_types:"StructureData",info:"Final structure of the calculation if present."},{name:"output_trajectory",required:!1,valid_types:"TrajectoryData",info:"Trajectory of various output quantities over the calculation if present."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:201,message:"`pseudos` could not be used to get the necessary pseudos."},{status:202,message:"Neither the `kpoints` nor the `kpoints_distance` input was specified."},{status:203,message:"Neither the `options` nor `automatic_parallelization` input was specified."},{status:204,message:"The `metadata.options` did not specify both `resources.num_machines` and `max_wallclock_seconds`."},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:401,message:"The maximum number of iterations was exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."}]},class:"aiida_abinit.workflows.base:AbinitBaseWorkChain"}}},commits_count:12,development_status:"beta",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install aiida-abinit",is_installable:"True"},"aiida-aenet":{code_home:"https://gitlab.com/lattice737/aiida-aenet",development_status:"planning",entry_point_prefix:"aenet",pip_url:"https://gitlab.com/lattice737/aiida-aenet",name:"aiida-aenet",package_name:"aiida_aenet",hosted_on:"gitlab.com",metadata:{author:"Nicholas Martinez",author_email:"nicholasmartinez@my.unt.edu",version:"0.1.0",description:"AiiDA plugin to construct machine-learning potentials using aenet",classifiers:["Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Development Status :: 0 - 
Alpha"]},aiida_version:"~=1.2",entry_points:{"aiida.data":{"aenet.algorithm":"aiida_aenet.data.algorithm:AenetAlgorithm","aenet.potential":"aiida_aenet.data.potentials:AenetPotential"},"aiida.calculations":{"aenet.cur":"aiida_aenet.calculations.cur:CurCalculation","aenet.generate":"aiida_aenet.calculations.generate:AenetGenerateCalculation","aenet.predict":"aiida_aenet.calculations.predict:AenetPredictCalculation","aenet.simulate":"aiida_aenet.calculations.simulate:AenetLammpsMdCalculation","aenet.train":"aiida_aenet.calculations.train:AenetTrainCalculation","aenet.transform":"aiida_aenet.calculations.transform:TransformCalculation"},"aiida.parsers":{"aenet.generate":"aiida_aenet.parsers.generate:AenetGenerateParser","aenet.predict":"aiida_aenet.parsers.predict:AenetPredictParser","aenet.simulate":"aiida_aenet.parsers.simulate:AenetLammpsMdParser","aenet.train":"aiida_aenet.parsers.train:AenetTrainParser"},"aiida.workflows":{"aenet.build_reference":"aiida_aenet.workflows.build_reference:BuildReferenceWorkChain","aenet.compare_simulations":"aiida_aenet.workflows.compare_simulations:CompareSimulationsWorkChain","aenet.make_potential":"aiida_aenet.workflows.make_potential:MakePotentialWorkChain","aenet.make_structures":"aiida_aenet.workflows.make_structures:MakeStructuresWorkChain"},"aenet.potentials":{"lammps.ann":"aiida_aenet.data.potentials.lammps:ANN"}},commits_count:1,summaryinfo:[{colorclass:"blue",text:"Calculations",count:6},{colorclass:"brown",text:"Parsers",count:4},{colorclass:"red",text:"Data",count:2},{colorclass:"green",text:"Workflows",count:4},{colorclass:"orange",text:"Other (Aenet potentials)",count:1}],pip_install_cmd:"pip install https://gitlab.com/lattice737/aiida-aenet"},"aiida-alloy":{code_home:"https://github.com/DanielMarchand/aiida-alloy",development_status:"beta",entry_point_prefix:"alloy",pip_url:"git+https://github.com/DanielMarchand/aiida-alloy",name:"aiida-alloy",package_name:"aiida_alloy",hosted_on:"github.com",metadata:{author:"The AiiDA developers group",author_email:"",version:"0.1.0a0",description:"Aiida Workflows for Elastic Constants using Quantum Espresso",classifiers:["Programming Language :: Python"]},aiida_version:">=1.0.0a0",entry_points:{"aiida.workflows":{elastic:"aiida_alloy.workflows.ElasticWorkChain:ElasticWorkChain"}},commits_count:1,summaryinfo:[{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install git+https://github.com/DanielMarchand/aiida-alloy",is_installable:"False"},"aiida-ase":{code_home:"https://github.com/aiidateam/aiida-ase",documentation_url:"https://aiida-ase.readthedocs.io/",entry_point_prefix:"ase",pip_url:"aiida-ase",plugin_info:"https://raw.github.com/aiidateam/aiida-ase/master/setup.json",name:"aiida-ase",package_name:"aiida_ase",hosted_on:"github.com",metadata:{description:"The official AiiDA plugin for ASE.",author_email:"The AiiDA team ",classifiers:["Development Status :: 4 - Beta","Framework :: AiiDA","License :: OSI Approved :: MIT License","Operating System :: MacOS :: MacOS X","Operating System :: POSIX :: Linux","Programming Language :: Python","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering"],version:"2.0.0"},aiida_version:">=1.6,<2.0",entry_points:{"aiida.calculations":{"ase.ase":{description:["`CalcJob` implementation that can be used to wrap around the ASE calculators."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to 
use for this job."},{name:"parameters",required:!0,valid_types:"Dict",info:"Input parameters for the namelists."},{name:"structure",required:!0,valid_types:"StructureData",info:"The input structure."},{name:"kpoints",required:!1,valid_types:"KpointsData",info:"The k-points to use for the calculation."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"settings",required:!1,valid_types:"Dict",info:"Optional settings that control the plugin."}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"array",required:!1,valid_types:"ArrayData",info:""},{name:"parameters",required:!1,valid_types:"Dict",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"structure",required:!1,valid_types:"StructureData",info:""},{name:"trajectory",required:!1,valid_types:"TrajectoryData",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:300,message:"One of the expected output files was missing."},{status:301,message:"The log file from the DFT code was not written out."},{status:302,message:"Relaxation did not complete."},{status:303,message:"SCF Failed."},{status:305,message:"Cannot identify what went wrong."},{status:306,message:"gpaw could not find the PAW potentials."},{status:307,message:"Attribute Error found in the stderr file."},{status:308,message:"Fermi level is infinite."},{status:400,message:"The calculation ran out of walltime."}]},class:"aiida_ase.calculations.ase:AseCalculation"}},"aiida.parsers":{"ase.ase":"aiida_ase.parsers.ase:AseParser","ase.gpaw":"aiida_ase.parsers.gpaw:GpawParser"},"aiida.workflows":{"ase.gpaw.base":{description:["Workchain to run a GPAW calculation with automated error handling and restarts."],spec:{inputs:[{name:"gpaw",required:!0,valid_types:"",info:""},{name:"structure",required:!0,valid_types:"StructureData",info:"The input structure."},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"handler_overrides",required:!1,valid_types:"Dict",info:"Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. 
This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated."},{name:"kpoints",required:!1,valid_types:"KpointsData",info:"k-points to use for the calculation."},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"array",required:!1,valid_types:"ArrayData",info:""},{name:"parameters",required:!1,valid_types:"Dict",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"structure",required:!1,valid_types:"StructureData",info:""},{name:"trajectory",required:!1,valid_types:"TrajectoryData",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:401,message:"The maximum number of iterations was exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."}]},class:"aiida_ase.workflows.base:GpawBaseWorkChain"}}},commits_count:8,development_status:"beta",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:2},{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install aiida-ase",is_installable:"True"},"aiida-autocas":{entry_point_prefix:"autocas",code_home:"https://github.com/microsoft/aiida-autocas",version_file:"https://raw.githubusercontent.com/microsoft/aiida-autocas/main/aiida_autocas/__init__.py",pip_url:"git+https://github.com/microsoft/aiida-autocas",name:"aiida-autocas",package_name:"aiida_autocas",hosted_on:"github.com",metadata:{version:"0.1.0",description:"AiiDA AutoCAS Plugin",classifiers:[]},aiida_version:">=2.0,<3",entry_points:{"aiida.calculations":{autocas:"aiida_autocas.calculations:AutoCASCalculation"},"aiida.parsers":{autocas:"aiida_autocas.parsers:AutoCASParser"}},commits_count:11,development_status:"planning",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1}],pip_install_cmd:"pip install git+https://github.com/microsoft/aiida-autocas"},"aiida-bands-inspect":{code_home:"https://github.com/greschd/aiida-bands-inspect",documentation_url:"https://aiida-bands-inspect.readthedocs.io",entry_point_prefix:"bands_inspect",pip_url:"aiida-bands-inspect",name:"aiida-bands-inspect",package_name:"aiida_bands_inspect",hosted_on:"github.com",metadata:{description:"AiiDA Plugin for running bands_inspect",author:"Dominik Gresch",author_email:"greschd@gmx.ch",license:"Apache 2.0",home_page:"https://aiida-bands-inspect.readthedocs.io",classifiers:["Development Status :: 4 - Beta","Environment :: Plugins","Framework :: AiiDA","Intended Audience 
:: Science/Research","License :: OSI Approved :: Apache Software License","Programming Language :: Python :: 3.6","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Topic :: Scientific/Engineering :: Physics"],version:"0.4.0"},aiida_version:null,entry_points:{"aiida.calculations":{"bands_inspect.align":{description:["Calculation class for the ``bands-inspect align`` command.",""," Arguments"," ---------"," bands1 : aiida.orm.data.array.bands.BandsData"," First band structure to compare."," bands2 : aiida.orm.data.array.bands.BandsData"," Second band structure to compare."],spec:{inputs:[{name:"bands1",required:!0,valid_types:"BandsData",info:"First bandstructure which is to be aligned"},{name:"bands2",required:!0,valid_types:"BandsData",info:"Second bandstructure which is to be aligned"},{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"bands1_shifted",required:!0,valid_types:"BandsData",info:""},{name:"bands2_shifted",required:!0,valid_types:"BandsData",info:""},{name:"difference",required:!0,valid_types:"Float",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"shift",required:!0,valid_types:"Float",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:200,message:"The retrieved folder data node could not be accessed."},{status:210,message:"At least one of the expected output files is missing from the retrieved folder."},{status:220,message:"The text output file content is not in the expected format."}]},class:"aiida_bands_inspect.calculations.align:AlignCalculation"},"bands_inspect.difference":{description:["Calculation class for the ``bands-inspect difference`` command.",""," Arguments"," ---------"," bands1 : aiida.orm.nodes.data.array.bands.BandsData"," First band structure to compare."," bands2 : aiida.orm.nodes.data.array.bands.BandsData"," Second band structure to compare."],spec:{inputs:[{name:"bands1",required:!0,valid_types:"BandsData",info:"First bandstructure which is to be compared"},{name:"bands2",required:!0,valid_types:"BandsData",info:"Second bandstructure which is to be compared"},{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"difference",required:!0,valid_types:"Float",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:200,message:"The retrieved folder data node could not be accessed."},{status:210,message:"The retrieved folder does not contain the difference output file."}]},class:"aiida_bands_inspect.calculations.difference:DifferenceCalculation"},"bands_inspect.plot":{description:["Calculation class for the ``bands_inspect plot`` command.",""," Arguments"," ---------"," bands1 : aiida.orm.nodes.data.array.bands.BandsData"," First band structure to plot."," bands2 : aiida.orm.nodes.data.array.bands.BandsData"," Second band structure to plot."],spec:{inputs:[{name:"bands1",required:!0,valid_types:"BandsData",info:"First bandstructure which is to be plotted"},{name:"bands2",required:!0,valid_types:"BandsData",info:"Second bandstructure which is to be plotted"},{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"plot",required:!0,valid_types:"SinglefileData",info:"The created band-structure comparison plot."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:200,message:"The retrieved folder data node could not be accessed."},{status:210,message:"The retrieved folder does not contain the plot output file."}]},class:"aiida_bands_inspect.calculations.plot:PlotCalculation"}},"aiida.parsers":{"bands_inspect.bands":"aiida_bands_inspect.parsers.bands:BandsParser","bands_inspect.difference":"aiida_bands_inspect.parsers.difference:DifferenceParser","bands_inspect.align":"aiida_bands_inspect.parsers.align:AlignParser","bands_inspect.plot":"aiida_bands_inspect.parsers.plot:PlotParser"}},commits_count:0,development_status:"beta",summaryinfo:[{colorclass:"blue",text:"Calculations",count:3},{colorclass:"brown",text:"Parsers",count:4}],pip_install_cmd:"pip install aiida-bands-inspect",is_installable:"True"},"aiida-bigdft":{code_home:"https://github.com/BigDFT-group/aiida-bigdft-plugin",development_status:"beta",entry_point_prefix:"bigdft",pip_url:"aiida-bigdft",plugin_info:"https://raw.github.com/BigDFT-group/aiida-bigdft-plugin/master/setup.json",name:"aiida-bigdft",package_name:"aiida_bigdft",hosted_on:"github.com",metadata:{description:"Aiida plugin for BigDFT code",author:"The BigDFT Team",author_email:"bigdft-developers@lists.launchpad.net",license:"MIT",home_page:"https://github.com/BigDFT-group/aiida-bigdft-plugin",classifiers:["Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Programming Language :: Python"],version:"0.2.6"},aiida_version:">=1.1.1,<2.0.0",entry_points:{"aiida.calculations":{bigdft:{description:["AiiDA calculation plugin wrapping the BigDFT python interface."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"parameters",required:!0,valid_types:"BigDFTParameters",info:"Command line parameters for BigDFT"},{name:"structure",required:!0,valid_types:"StructureData",info:"StructureData struct"},{name:"extra_retrieved_files",required:!1,valid_types:"List",info:""},{name:"kpoints",required:!1,valid_types:"Dict",info:"kpoint mesh or kpoint path"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"pseudos",required:!1,valid_types:"List",info:""},{name:"structurefile",required:!1,valid_types:"Str",info:"xyz file"}],outputs:[{name:"bigdft_logfile",required:!0,valid_types:"BigDFTLogfile",info:"BigDFT log file as a dict"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:100,message:"Calculation did not produce all expected output files."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."}]},class:"aiida_bigdft.calculations.bigdft:BigDFTCalculation"},"bigdft.postscript":{description:["AiiDA calculation to add post treatments to a computation workcahin."," post treatment scripts are to be registered as codes in aiida."," They are python scripts accepting one argument : a remotefolder where data is stored"," Output files are not specified and can be added to the extra_retrieved_files list"],spec:{inputs:[{name:"bigdft_data_folder",required:!0,valid_types:"RemoteData",info:"Folder to the BigDFT data folder"},{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"retrieved_files",required:!1,valid_types:"List",info:""}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:101,message:"Script execution failed"},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."}]},class:"aiida_bigdft.calculations.postscript:ScriptCalculation"}},"aiida.cmdline.data":{bigdft:"aiida_bigdft.cli:data_cli"},"aiida.data":{bigdft:"aiida_bigdft.data:BigDFTParameters",bigdft_logfile:"aiida_bigdft.data:BigDFTLogfile"},"aiida.parsers":{bigdft:"aiida_bigdft.parsers:BigDFTParser"},"aiida.workflows":{bigdft:{description:["No description available"],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"parameters",required:!0,valid_types:"BigDFTParameters",info:"Command line parameters for BigDFT"},{name:"structure",required:!0,valid_types:"StructureData",info:"StructureData struct"},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"extra_retrieved_files",required:!1,valid_types:"List",info:""},{name:"handler_overrides",required:!1,valid_types:"Dict",info:"Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated."},{name:"kpoints",required:!1,valid_types:"Dict",info:"kpoint mesh or kpoint path"},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"pseudos",required:!1,valid_types:"List",info:""},{name:"run_opts",required:!1,valid_types:"Dict",info:"metadata"},{name:"show_warnings",required:!1,valid_types:"Bool",info:"turn the warnings on/off."},{name:"structurefile",required:!1,valid_types:"Str",info:"xyz file"}],outputs:[{name:"bigdft_logfile",required:!0,valid_types:"BigDFTLogfile",info:"BigDFT log file as a dict"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"BigDFT input error"},{status:200,message:"BigDFT runtime error"},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:401,message:"The maximum number of iterations was exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."}]},class:"aiida_bigdft.workflows.base:BigDFTBaseWorkChain"},"bigdft.relax":{description:["Structure relaxation workchain."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"relax",required:!0,valid_types:"",info:""},{name:"structure",required:!0,valid_types:"StructureData",info:"StructureData struct"},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"extra_retrieved_files",required:!1,valid_types:"List",info:""},{name:"handler_overrides",required:!1,valid_types:"Dict",info:"Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated."},{name:"kpoints",required:!1,valid_types:"Dict",info:"kpoint mesh or kpoint path"},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"parameters",required:!1,valid_types:"BigDFTParameters",info:"param dictionary"},{name:"pseudos",required:!1,valid_types:"List",info:""},{name:"run_opts",required:!1,valid_types:"Dict",info:"metadata"},{name:"show_warnings",required:!1,valid_types:"Bool",info:"turn the warnings on/off."},{name:"structurefile",required:!1,valid_types:"Str",info:"xyz file"}],outputs:[{name:"bigdft_logfile",required:!0,valid_types:"BigDFTLogfile",info:"BigDFT log file as a dict"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"forces",required:!1,valid_types:"ArrayData",info:""},{name:"relaxed_structure",required:!1,valid_types:"StructureData",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"total_energy",required:!1,valid_types:"Float",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:101,message:"Subprocess failed for relaxation"}]},class:"aiida_bigdft.workflows.relax:BigDFTRelaxWorkChain"}}},commits_count:0,summaryinfo:[{colorclass:"blue",text:"Calculations",count:2},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"red",text:"Data",count:2},{colorclass:"green",text:"Workflows",count:2},{colorclass:"orange",text:"Other (Data commands)",count:1}],pip_install_cmd:"pip install aiida-bigdft",is_installable:"True"},"aiida-castep":{code_home:"https://gitlab.com/bz1/aiida-castep",development_status:"stable",documentation_url:"https://aiida-castep.readthedocs.io/",entry_point_prefix:"castep",pip_url:"aiida-castep",plugin_info:"https://gitlab.com/bz1/aiida-castep/raw/master/setup.json",name:"aiida-castep",package_name:"aiida_castep",hosted_on:"gitlab.com",metadata:{description:"AiiDA plugin for CASTEP",author:"Bonan Zhu",author_email:"zhubonan@outlook.com",license:"MIT License",home_page:"https://github.com/zhubonan/aiida-castep",classifiers:["Framework :: AiiDA","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9"],version:"2.0.1"},aiida_version:">=2.0,<3.0",entry_points:{"aiida.calculations":{"castep.castep":{description:["Class representing a generic CASTEP calculation -"," This class should work for all types of calculations."],spec:{inputs:[{name:"parameters",required:!0,valid_types:"Dict",info:"A node that defines the input parameters"},{name:"pseudos",required:!0,valid_types:"",info:"Use nodes for the pseudopotentails of one ofthe element in the structure. You should pass aa dictionary specifying the pseudpotential node foreach kind such as {O: }"},{name:"structure",required:!0,valid_types:"StructureData",info:"The input structure"},{name:"bs_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Extra kpoints input for task: bandstructure"},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. 
This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"elnes_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Extra kpoints input for task: elnes"},{name:"kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Use a node defining the kpoints for the calculation"},{name:"magres_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Extra kpoints input for task: magres"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"optics_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Extra kpoints input for task: optics"},{name:"parent_calc_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Use a remote folder as the parent folder. Useful for restarts."},{name:"phonon_fine_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Extra kpoints input for task: phonon, phonon+efield"},{name:"phonon_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Extra kpoints input for task: phonon, phonon+efield"},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"A node for additional settings"},{name:"spectral_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Extra kpoints input for task: spectral"},{name:"supercell_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Extra kpoints input for task: phonon"}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"Parsed results in a dictionary format."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:0,message:"Calculation terminated gracefully, end found"},{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:101,message:"SCF Cycles failed to reach convergence"},{status:103,message:"Stopped execuation due to detection of 'stop ' keyword in param file."},{status:104,message:"CASTEP generate error files. 
Check them for details"},{status:105,message:"Cannot find the end of calculation"},{status:106,message:"No output .castep files found"},{status:107,message:"Calculation self-terminated due to time limit"},{status:108,message:"No retrieve folder is found"},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:200,message:"UNKOWN ERROR"},{status:501,message:"At least one kpoints/spin has no empty bands - please rerun with increased nextra_bands."}]},class:"aiida_castep.calculations.castep:CastepCalculation"},"castep.ts":{description:["CASTEP calculation for transition state search. Use an extra input product structure."],spec:{inputs:[{name:"parameters",required:!0,valid_types:"Dict",info:"A node that defines the input parameters"},{name:"product_structure",required:!0,valid_types:"StructureData",info:"Product structure for transition state search."},{name:"pseudos",required:!0,valid_types:"",info:"Use nodes for the pseudopotentails of one ofthe element in the structure. You should pass aa dictionary specifying the pseudpotential node foreach kind such as {O: }"},{name:"structure",required:!0,valid_types:"StructureData",info:"The input structure"},{name:"bs_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Extra kpoints input for task: bandstructure"},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"elnes_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Extra kpoints input for task: elnes"},{name:"kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Use a node defining the kpoints for the calculation"},{name:"magres_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Extra kpoints input for task: magres"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"optics_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Extra kpoints input for task: optics"},{name:"parent_calc_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Use a remote folder as the parent folder. Useful for restarts."},{name:"phonon_fine_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Extra kpoints input for task: phonon, phonon+efield"},{name:"phonon_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Extra kpoints input for task: phonon, phonon+efield"},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"A node for additional settings"},{name:"spectral_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Extra kpoints input for task: spectral"},{name:"supercell_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Extra kpoints input for task: phonon"}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"Parsed results in a dictionary format."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:0,message:"Calculation terminated gracefully, end found"},{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:101,message:"SCF Cycles failed to reach convergence"},{status:103,message:"Stopped execuation due to detection of 'stop ' keyword in param file."},{status:104,message:"CASTEP generate error files. Check them for details"},{status:105,message:"Cannot find the end of calculation"},{status:106,message:"No output .castep files found"},{status:107,message:"Calculation self-terminated due to time limit"},{status:108,message:"No retrieve folder is found"},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:200,message:"UNKOWN ERROR"},{status:501,message:"At least one kpoints/spin has no empty bands - please rerun with increased nextra_bands."}]},class:"aiida_castep.calculations.castep:CastepTSCalculation"}},"aiida.cmdline.data":{"castep-helper":"aiida_castep.cmdline.helper_cmd:helper_cmd","castep-pseudos":"aiida_castep.cmdline.otfg_cmd:pseudos_cmd"},"aiida.data":{"castep.otfgdata":"aiida_castep.data.otfg:OTFGData","castep.uspdata":"aiida_castep.data.usp:UspData"},"aiida.groups":{"castep.otfg":"aiida_castep.data.otfg:OTFGGroup"},"aiida.parsers":{"castep.castep":"aiida_castep.parsers.castep:CastepParser"},"aiida.tests":{"castep.calculation":"aiida_castep.tests.dbtests.dbtestcalculation"},"aiida.tools.calculations":{"castep.castep":"aiida_castep.calculations.tools:CastepCalcTools"},"aiida.workflows":{"castep.altrelax":{description:["A relaxation workflow that alternates between fixed cell and unfixed cell"," This is meidate the problem in CASTEP where if the cell is partially constraints"," the convergence would be very slow.",""," To overcome this problem, the structure should be relaxed with cell constraints"," then restart with fixed cell and repeat.",""," Following fields can be used in ``relax_options``",""," :var_cell_iter_max: Maximum iterations in 
variable cell relaxation, default to 10",""," :fix_cell_iter_max: Maximum iterations in fixed cell relaxation, default to 20"],spec:{inputs:[{name:"base",required:!0,valid_types:"Data",info:""},{name:"calc",required:!0,valid_types:"Data",info:""},{name:"structure",required:!0,valid_types:"StructureData",info:"Structure to be used for relaxation."},{name:"clean_workdir",required:!1,valid_types:"Bool, NoneType",info:"Wether to clean the workdir of the calculations at the end of the workchain. The default is not performing any cleaning."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"relax_options",required:!1,valid_types:"Dict, NoneType",info:"Options for relaxation."}],outputs:[{name:"output_bands",required:!0,valid_types:"BandsData",info:""},{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:""},{name:"output_array",required:!1,valid_types:"ArrayData",info:""},{name:"output_structure",required:!1,valid_types:"StructureData",info:"The relaxed structure."},{name:"output_trajectory",required:!1,valid_types:"ArrayData",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:101,message:"Subprocess lauched has failed in the relax stage"},{status:102,message:"Geometry optimisation is not converged but the maximum iteration is exceeded."},{status:201,message:"NO cell_constraints find in the input"}]},class:"aiida_castep.workflows.relax:CastepAlterRelaxWorkChain"},"castep.bands":{description:["Workchain for running bands calculation.",""," This workchain does the following:",""," 1. Relax the structure if requested (eg. inputs passed to the relax namespace)."," 2. Optionally: Do a SCF singlepoint calculation"," 3. Do combined SCF + non-SCF calculation for bands and dos.",""," Inputs must be passed for the SCF calculation (dispatched to bands and DOS),"," others are optional.",""," Input for bands and dos calculations are optional. However, if they are needed, the full list of inputs must"," be passed. For the `parameters` node, one may choose to only specify those fields that need to be updated."],spec:{inputs:[{name:"scf",required:!0,valid_types:"Data",info:"Inputs for SCF workchain, mandatory. Used as template for bands/dos if not supplied separately"},{name:"structure",required:!0,valid_types:"StructureData",info:"The input structure"},{name:"bands",required:!1,valid_types:"Data",info:"Inputs for bands calculation, if needed"},{name:"bands_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Explicit kpoints for the bands"},{name:"bands_kpoints_distance",required:!1,valid_types:"Float, NoneType",info:"Spacing for band distances, used by seekpath"},{name:"clean_children_workdir",required:!1,valid_types:"Str, NoneType",info:"What part of the called children to clean"},{name:"dos",required:!1,valid_types:"Data",info:"Inputs for DOS calculation, if needed"},{name:"dos_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Kpoints for running DOS calculations"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"only_dos",required:!1,valid_types:"",info:"Flag for running only DOS calculations"},{name:"options",required:!1,valid_types:"",info:"Options for this workchain. 
Supported keywords: dos_smearing, dos_npoints."},{name:"relax",required:!1,valid_types:"Data",info:"Inputs for Relaxation workchain, if needed"},{name:"run_separate_scf",required:!1,valid_types:"",info:"Flag for running a separate SCF calculation, default to False"}],outputs:[{name:"band_structure",required:!0,valid_types:"",info:"Computed band structure with labels"},{name:"dos_bands",required:!1,valid_types:"",info:"Bands from the DOS calculation"},{name:"primitive_structure",required:!1,valid_types:"",info:"Primitive structure used for band structure calculations"},{name:"seekpath_parameters",required:!1,valid_types:"",info:"Parameters used by seekpath"}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:501,message:"Relaxation workchain failed"},{status:502,message:"SCF workchain failed"},{status:503,message:"Band structure workchain failed"},{status:504,message:"DOS workchain failed"}]},class:"aiida_castep.workflows.bands:CastepBandsWorkChain"},"castep.base":{description:["A basic workchain for generic CASTEP calculations."," We try to handle erros such as walltime exceeded or SCF not converged"],spec:{inputs:[{name:"calc",required:!0,valid_types:"Data",info:""},{name:"calc_options",required:!1,valid_types:"Dict, NoneType",info:"Options to be passed to calculations's metadata.options"},{name:"clean_workdir",required:!1,valid_types:"Bool, NoneType",info:"Wether to clean the workdir of the calculations or not, the default is not clean."},{name:"continuation_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Use a remote folder as the parent folder. Useful for restarts."},{name:"ensure_gamma_centering",required:!1,valid_types:"Bool, NoneType",info:"Ensure the kpoint grid is gamma centred."},{name:"kpoints_spacing",required:!1,valid_types:"Float, NoneType",info:"Kpoint spacing"},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of restarts"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"options",required:!1,valid_types:"Dict, NoneType",info:"Options specific to the workchain.Avaliable options: queue_wallclock_limit, use_castep_bin"},{name:"pseudos_family",required:!1,valid_types:"Str, NoneType",info:"Pseudopotential family to be used"},{name:"reuse_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Use a remote folder as the parent folder. 
Useful for restarts."}],outputs:[{name:"output_bands",required:!0,valid_types:"BandsData",info:""},{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:""},{name:"output_array",required:!1,valid_types:"ArrayData",info:""},{name:"output_structure",required:!1,valid_types:"StructureData",info:""},{name:"output_trajectory",required:!1,valid_types:"ArrayData",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:200,message:"The maximum number of iterations has been exceeded"},{status:201,message:"The maximum length of the wallclocks has been exceeded"},{status:301,message:"CASTEP generated error files and is not recoverable"},{status:302,message:"Cannot reach SCF convergence despite restart efforts"},{status:400,message:"The stop flag has been put in the .param file to request termination of the calculation."},{status:900,message:"Input validate is failed"},{status:901,message:"Completed one iteration but found not calculation returned"},{status:1e3,message:"Error is not known"}]},class:"aiida_castep.workflows.base:CastepBaseWorkChain"},"castep.relax":{description:["WorkChain to relax structures."," Restart the relaxation calculation until the structure is fully relaxed."," Each CASTEP relaxation may finish without error with not fully relaxed structure"," if the number of iteration is exceeded (*geom_max_iter*)."," This workchain try to restart such calculations (wrapped in CastepBaseWorkChain)"," until the structure is fully relaxed",""," ``relax_options`` is a Dict of the options avaliable fields are:",""," - restart_mode: mode of restart, choose from ``reuse`` (default), ``structure``,"," ``continuation``."," - bypass: Bypass relaxation control - e.g. no checking of the convergence."," Can be used for doing singlepoint calculation."],spec:{inputs:[{name:"base",required:!0,valid_types:"Data",info:""},{name:"calc",required:!0,valid_types:"Data",info:""},{name:"structure",required:!0,valid_types:"StructureData",info:"Structure to be used for relaxation."},{name:"clean_workdir",required:!1,valid_types:"Bool, NoneType",info:"Wether to clean the workdir of the calculations at the end of the workchain. 
The default is not performing any cleaning."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"relax_options",required:!1,valid_types:"Dict, NoneType",info:"Options for relaxation."}],outputs:[{name:"output_bands",required:!0,valid_types:"BandsData",info:""},{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:""},{name:"output_array",required:!1,valid_types:"ArrayData",info:""},{name:"output_structure",required:!1,valid_types:"StructureData",info:"The relaxed structure."},{name:"output_trajectory",required:!1,valid_types:"ArrayData",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:101,message:"Subprocess lauched has failed in the relax stage"},{status:102,message:"Geometry optimisation is not converged but the maximum iteration is exceeded."}]},class:"aiida_castep.workflows.relax:CastepRelaxWorkChain"}},console_scripts:{"castep.mock":"aiida_castep.cmdline.mock_castep:mock_castep"}},commits_count:10,summaryinfo:[{colorclass:"blue",text:"Calculations",count:2},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"red",text:"Data",count:2},{colorclass:"green",text:"Workflows",count:4},{colorclass:"purple",text:"Console scripts",count:1},{colorclass:"orange",text:"Other (Data commands, Groups, Tests, ...)",count:5}],pip_install_cmd:"pip install aiida-castep",is_installable:"True"},"aiida-catmap":{code_home:"https://github.com/sudarshanv01/aiida-catmap",entry_point_prefix:"catmap",name:"aiida-catmap",package_name:"aiida_catmap",hosted_on:"github.com",metadata:{author:"Sudarshan Vijay",author_email:"vijays@fysik.dtu.dk",version:"0.2.0a0",description:"AiiDA package that interfaces with Kinetic modelling code CatMAP",classifiers:["Programming Language :: Python","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Framework :: AiiDA"]},aiida_version:">=1.1.0,<2.0.0",entry_points:{"aiida.calculations":{catmap:"aiida_catmap.calculations.catmap:CatMAPCalculation"},"aiida.parsers":{catmap:"aiida_catmap.parsers.catmap:CatMAPParser"}},commits_count:0,development_status:"planning",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1}],pip_install_cmd:"See source code repository."},"aiida-catmat":{code_home:"https://github.com/pzarabadip/aiida-catmat",entry_point_prefix:"catmat",development_status:"beta",documentation_url:"https://aiida-catmat.readthedocs.io/",pip_url:"aiida-catmat",name:"aiida-catmat",package_name:"aiida_catmat",hosted_on:"github.com",metadata:{description:"Collection of AiiDA WorkChains Developed in Morgan Group",author:"Pezhman Zarabadi-Poor",author_email:"pzarabadip@gmail.com",license:"MIT License",home_page:"https://github.com/pzarabadip/aiida-catmat",classifiers:["Programming Language :: Python :: 3.8","Programming Language :: Python :: 
3.9"],version:"1.0.0b0"},aiida_version:null,entry_points:{"aiida.parsers":{vasp_base_parser:"aiida_catmat.parsers:VaspBaseParser"},"aiida.workflows":{"vasp.base":"aiida_catmat.workchains:VaspBaseWorkChain","catmat.vasp_multistage":"aiida_catmat.workchains:VaspMultiStageWorkChain","catmat.vasp_converge":"aiida_catmat.workchains:VaspConvergeWorkChain","catmat.vasp_catmat":"aiida_catmat.workchains:VaspCatMatWorkChain","catmat.vasp_multistage_ddec":"aiida_catmat.workchains:VaspMultiStageDdecWorkChain"}},commits_count:0,summaryinfo:[{colorclass:"brown",text:"Parsers",count:1},{colorclass:"green",text:"Workflows",count:5}],pip_install_cmd:"pip install --pre aiida-catmat",is_installable:"False"},"aiida-ce":{code_home:"https://github.com/unkcpz/aiida-ce",development_status:"beta",entry_point_prefix:"ce",pip_url:"git+https://github.com/unkcpz/aiida-ce",name:"aiida-ce",package_name:"aiida_ce",hosted_on:"github.com",metadata:{author:"unkcpz",author_email:"morty.yu@yahoo.com",version:"0.1.0a0",description:"AiiDA plugin for running cluster expansion using icet.",classifiers:["Programming Language :: Python","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Framework :: AiiDA"]},aiida_version:">=1.0.0,<2.0.0",entry_points:{"aiida.data":{ce:"aiida_ce.data:DiffParameters","ce.structures":"aiida_ce.data.structure_set:StructureSet","ce.cluster":"aiida_ce.data.cluster:ClusterSpaceData"},"aiida.calculations":{"ce.genenum":"aiida_ce.calculations.genenum:EnumCalculation","ce.gensqs":"aiida_ce.calculations.gensqs:SqsCalculation","ce.train":"aiida_ce.calculations.train:TrainCalculation"},"aiida.parsers":{"ce.genenum":"aiida_ce.parsers.genenum:EnumParser","ce.gensqs":"aiida_ce.parsers.gensqs:SqsParser","ce.train":"aiida_ce.parsers.train:TrainParser"},"aiida.cmdline.data":{ce:"aiida_ce.cli:data_cli"}},commits_count:0,summaryinfo:[{colorclass:"blue",text:"Calculations",count:3},{colorclass:"brown",text:"Parsers",count:3},{colorclass:"red",text:"Data",count:3},{colorclass:"orange",text:"Other (Data commands)",count:1}],pip_install_cmd:"pip install git+https://github.com/unkcpz/aiida-ce",is_installable:"True"},"aiida-champ":{code_home:"https://github.com/TREX-CoE/aiida-champ",development_status:"beta",documentation_url:"http://aiida-champ.readthedocs.io/",entry_point_prefix:"champ",pip_url:"aiida-champ",name:"aiida-champ",package_name:"aiida_champ",hosted_on:"github.com",metadata:{description:"AiiDA plugin that wraps the vmc executable of CHAMP code for computing the total energy and much more stuff.",author:"Ravindra Shinde",author_email:"r.l.shinde@utwente.nl",license:"MIT",home_page:"https://github.com/neelravi/aiida-champ",classifiers:["Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Programming Language :: Python"],version:"1.2.6"},aiida_version:null,entry_points:{"aiida.data":{CHAMP:"aiida_champ.data:CHAMPParameters"},"aiida.calculations":{CHAMP:{description:["AiiDA calculation plugin wrapping the CHAMP's vmc executable.",""," aiida-champ can be used to manage the workflow of a vmc/dmc calculation of the CHAMP code.",""," Author :: Ravindra Shinde"," Email :: r.l.shinde@utwente.nl"],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"determinants",required:!0,valid_types:"SinglefileData",info:"Input determinants file"},{name:"filemain",required:!0,valid_types:"SinglefileData",info:"Input 
File"},{name:"molecule",required:!0,valid_types:"SinglefileData",info:"Molecule structure File"},{name:"ecp1",required:!1,valid_types:"SinglefileData",info:"Input ECP file for atom type 1"},{name:"ecp2",required:!1,valid_types:"SinglefileData",info:"Input ECP file for atom type 2"},{name:"jastrow",required:!1,valid_types:"SinglefileData",info:"Input jastrow file"},{name:"jastrowder",required:!1,valid_types:"SinglefileData",info:"Input jastrowder file"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"numericalbasis1",required:!1,valid_types:"SinglefileData",info:"Input numerical basis file atom 1"},{name:"numericalbasis2",required:!1,valid_types:"SinglefileData",info:"Input numerical basis file atom 2"},{name:"numericalbasisinfo",required:!1,valid_types:"SinglefileData",info:"Input numerical basis information file"},{name:"orbitals",required:!1,valid_types:"SinglefileData",info:"Input orbitals file"},{name:"symmetry",required:!1,valid_types:"SinglefileData",info:"Input symmetry file"},{name:"trexio",required:!1,valid_types:"SinglefileData",info:"Input trexio hdf5 file"}],outputs:[{name:"Output",required:!0,valid_types:"SinglefileData",info:"Output file of the VMC/DMC calculation"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"Energy",required:!1,valid_types:"Float",info:"Output total energy of the VMC/DMC calculation"},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:300,message:"Calculation did not produce all expected output files."}]},class:"aiida_champ.calculations:CHAMPCalculation"}},"aiida.parsers":{CHAMP:"aiida_champ.parsers:CHAMPParser"},"aiida.cmdline.data":{CHAMP:"aiida_champ.cli:data_cli"}},commits_count:0,summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"red",text:"Data",count:1},{colorclass:"orange",text:"Other (Data commands)",count:1}],pip_install_cmd:"pip install aiida-champ",is_installable:"True"},"aiida-codtools":{code_home:"https://github.com/aiidateam/aiida-codtools",documentation_url:"https://aiida-codtools.readthedocs.io/",entry_point_prefix:"codtools",pip_url:"aiida-codtools",plugin_info:"https://raw.githubusercontent.com/aiidateam/aiida-codtools/master/setup.json",name:"aiida-codtools",package_name:"aiida_codtools",hosted_on:"github.com",metadata:{description:"The Official AiiDA plugin for the cod-tools package.",author_email:"The AiiDA team ",classifiers:["Development Status :: 5 - Production/Stable","Framework :: AiiDA","License :: OSI Approved :: MIT License","Programming Language :: Python","Programming Language :: Python :: 
3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9"],version:"3.1.0"},aiida_version:">=2.1,<3.0",entry_points:{"aiida.calculations":{"codtools.cif_base":{description:["Generic `CalcJob` implementation that can easily be extended to work with any of the `cod-tools` scripts."],spec:{inputs:[{name:"cif",required:!0,valid_types:"CifData",info:"The CIF to be processed."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Command line parameters."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"messages",required:!1,valid_types:"Dict",info:"Warning and error messages returned by script."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Neither the output for the error file could be read from the retrieved folder."},{status:311,message:"The output file could not be read from the retrieved folder."},{status:312,message:"The error file could not be read from the retrieved folder."},{status:313,message:"The output file is empty."},{status:320,message:"Invalid command line option passed."},{status:400,message:"The output file could not be parsed."},{status:410,message:"The output file could not be parsed into a CifData object."}]},class:"aiida_codtools.calculations.cif_base:CifBaseCalculation"},"codtools.cif_cell_contents":{description:["CalcJob plugin for the `cif_cell_contents` script of the `cod-tools` package."],spec:{inputs:[{name:"cif",required:!0,valid_types:"CifData",info:"The CIF to be processed."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Command line parameters."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"formulae",required:!0,valid_types:"Dict",info:"A dictionary of formulae present in the CIF."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"messages",required:!1,valid_types:"Dict",info:"Warning and error messages returned by script."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Neither the output for the error file could be read from the retrieved folder."},{status:311,message:"The output file could not be read from the retrieved folder."},{status:312,message:"The error file could not be read from the retrieved folder."},{status:313,message:"The output file is empty."},{status:320,message:"Invalid command line option passed."},{status:400,message:"The output file could not be parsed."},{status:410,message:"The output file could not be parsed into a CifData object."}]},class:"aiida_codtools.calculations.cif_cell_contents:CifCellContentsCalculation"},"codtools.cif_cod_check":{description:["CalcJob plugin for the `cif_cod_check` script of the `cod-tools` package."],spec:{inputs:[{name:"cif",required:!0,valid_types:"CifData",info:"The CIF to be processed."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Command line parameters."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"messages",required:!0,valid_types:"Dict",info:"Warning and error messages returned by the script."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Neither the output for the error file could be read from the retrieved folder."},{status:311,message:"The output file could not be read from the retrieved folder."},{status:312,message:"The error file could not be read from the retrieved folder."},{status:313,message:"The output file is empty."},{status:320,message:"Invalid command line option passed."},{status:400,message:"The output file could not be parsed."},{status:410,message:"The output file could not be parsed into a CifData object."}]},class:"aiida_codtools.calculations.cif_cod_check:CifCodCheckCalculation"},"codtools.cif_cod_deposit":{description:["CalcJob plugin for the `cif_cod_deposit` script of the `cod-tools` package."],spec:{inputs:[{name:"cif",required:!0,valid_types:"CifData",info:"The CIF to be processed."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Command line parameters."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"messages",required:!1,valid_types:"Dict",info:"Warning and error messages returned by script."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Neither the output for the error file could be read from the retrieved folder."},{status:300,message:"The deposition failed for unknown reasons."},{status:310,message:"The deposition failed because the input was invalid."},{status:311,message:"The output file could not be read from the retrieved folder."},{status:312,message:"The error file could not be read from the retrieved folder."},{status:313,message:"The output file is empty."},{status:320,message:"Invalid command line option passed."},{status:400,message:"The output file could not be parsed."},{status:410,message:"The output file could not be parsed into a CifData object."},{status:410,message:"The deposition failed because one or more CIFs already exist in the COD."},{status:420,message:"The structure is unchanged and so deposition is unnecessary."}]},class:"aiida_codtools.calculations.cif_cod_deposit:CifCodDepositCalculation"},"codtools.cif_cod_numbers":{description:["CalcJob plugin for the `cif_cod_numbers` script of the `cod-tools` package."],spec:{inputs:[{name:"cif",required:!0,valid_types:"CifData",info:"The CIF to be processed."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Command line parameters."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"numbers",required:!0,valid_types:"Dict",info:"Mapping of COD IDs found with their formula and count."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"messages",required:!1,valid_types:"Dict",info:"Warning and error messages returned by script."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Neither the output for the error file could be read from the retrieved folder."},{status:311,message:"The output file could not be read from the retrieved folder."},{status:312,message:"The error file could not be read from the retrieved folder."},{status:313,message:"The output file is empty."},{status:320,message:"Invalid command line option passed."},{status:400,message:"The output file could not be parsed."},{status:410,message:"The output file could not be parsed into a CifData object."}]},class:"aiida_codtools.calculations.cif_cod_numbers:CifCodNumbersCalculation"},"codtools.cif_filter":{description:["CalcJob plugin for the `cif_filter` script of the `cod-tools` package."],spec:{inputs:[{name:"cif",required:!0,valid_types:"CifData",info:"The CIF to be processed."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Command line parameters."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"cif",required:!0,valid_types:"CifData",info:"The CIF produced by the script."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"messages",required:!1,valid_types:"Dict",info:"Warning and error messages returned by script."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Neither the output for the error file could be read from the retrieved folder."},{status:311,message:"The output file could not be read from the retrieved folder."},{status:312,message:"The error file could not be read from the retrieved folder."},{status:313,message:"The output file is empty."},{status:320,message:"Invalid command line option passed."},{status:400,message:"The output file could not be parsed."},{status:410,message:"The output file could not be parsed into a CifData object."}]},class:"aiida_codtools.calculations.cif_filter:CifFilterCalculation"},"codtools.cif_select":{description:["CalcJob plugin for the `cif_select` script of the `cod-tools` package."],spec:{inputs:[{name:"cif",required:!0,valid_types:"CifData",info:"The CIF to be processed."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Command line parameters."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"cif",required:!0,valid_types:"CifData",info:"The CIF produced by the script."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"messages",required:!1,valid_types:"Dict",info:"Warning and error messages returned by script."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Neither the output for the error file could be read from the retrieved folder."},{status:311,message:"The output file could not be read from the retrieved folder."},{status:312,message:"The error file could not be read from the retrieved folder."},{status:313,message:"The output file is empty."},{status:320,message:"Invalid command line option passed."},{status:400,message:"The output file could not be parsed."},{status:410,message:"The output file could not be parsed into a CifData object."}]},class:"aiida_codtools.calculations.cif_select:CifSelectCalculation"},"codtools.cif_split_primitive":{description:["CalcJob plugin for the `cif_split_primitive` script of the `cod-tools` package."],spec:{inputs:[{name:"cif",required:!0,valid_types:"CifData",info:"The CIF to be processed."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Command line parameters."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"cifs",required:!0,valid_types:"CifData",info:"The CIFs produced by the script."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"messages",required:!1,valid_types:"Dict",info:"Warning and error messages returned by script."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Neither the output for the error file could be read from the retrieved folder."},{status:311,message:"The output file could not be read from the retrieved folder."},{status:312,message:"The error file could not be read from the retrieved folder."},{status:313,message:"The output file is empty."},{status:320,message:"Invalid command line option passed."},{status:400,message:"The output file could not be parsed."},{status:410,message:"The output file could not be parsed into a CifData object."}]},class:"aiida_codtools.calculations.cif_split_primitive:CifSplitPrimitiveCalculation"},"codtools.primitive_structure_from_cif":{description:["Attempt to parse the given `CifData` and create a `StructureData` from it.",""," First the raw CIF file is parsed with the given `parse_engine`. The resulting `StructureData` is then passed through"," SeeKpath to try and get the primitive cell. If that is successful, important structural parameters as determined by"," SeeKpath will be set as extras on the structure node which is then returned as output.",""," :param cif: the `CifData` node"," :param parse_engine: the parsing engine, supported libraries 'ase' and 'pymatgen'"," :param symprec: a `Float` node with symmetry precision for determining primitive cell in SeeKpath"," :param site_tolerance: a `Float` node with the fractional coordinate distance tolerance for finding overlapping"," sites. This will only be used if the parse_engine is pymatgen"," :return: the primitive `StructureData` as determined by SeeKpath"],spec:{inputs:[{name:"cif",required:!0,valid_types:"Data",info:"the `CifData` node"},{name:"parse_engine",required:!0,valid_types:"Data",info:"the parsing engine, supported libraries 'ase' and 'pymatgen'"},{name:"site_tolerance",required:!0,valid_types:"Data",info:"a `Float` node with the fractional coordinate distance tolerance for finding overlapping\nsites. 
This will only be used if the parse_engine is pymatgen"},{name:"symprec",required:!0,valid_types:"Data",info:"a `Float` node with symmetry precision for determining primitive cell in SeeKpath"},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_codtools.calculations.functions.primitive_structure_from_cif:primitive_structure_from_cif"}},"aiida.parsers":{"codtools.cif_base":"aiida_codtools.parsers.cif_base:CifBaseParser","codtools.cif_cell_contents":"aiida_codtools.parsers.cif_cell_contents:CifCellContentsParser","codtools.cif_cod_check":"aiida_codtools.parsers.cif_cod_check:CifCodCheckParser","codtools.cif_cod_deposit":"aiida_codtools.parsers.cif_cod_deposit:CifCodDepositParser","codtools.cif_cod_numbers":"aiida_codtools.parsers.cif_cod_numbers:CifCodNumbersParser","codtools.cif_split_primitive":"aiida_codtools.parsers.cif_split_primitive:CifSplitPrimitiveParser"},"aiida.workflows":{"codtools.cif_clean":{description:["WorkChain to clean a `CifData` node using the `cif_filter` and `cif_select` scripts of `cod-tools`.",""," It will first run `cif_filter` to correct syntax errors, followed by `cif_select` which will canonicalize the tags."," If a group is passed for the `group_structure` input, the atomic structure library defined by the `engine` input"," will be used to parse the final cleaned `CifData` to construct a `StructureData` object, which will then be passed"," to the `SeeKpath` library to analyze it and return the primitive structure"],spec:{inputs:[{name:"cif",required:!0,valid_types:"CifData",info:"The CifData node that is to be cleaned."},{name:"cif_filter",required:!0,valid_types:"Data",info:""},{name:"cif_select",required:!0,valid_types:"Data",info:""},{name:"group_cif",required:!1,valid_types:"Group, NoneType",info:"An optional Group to which the final cleaned CifData node will be added."},{name:"group_structure",required:!1,valid_types:"Group, NoneType",info:"An optional Group to which the final reduced StructureData node will be added."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"parse_engine",required:!1,valid_types:"Str",info:"The atomic structure engine to parse the cif and create the structure."},{name:"site_tolerance",required:!1,valid_types:"Float",info:"The fractional coordinate distance tolerance for finding overlapping sites (pymatgen only)."},{name:"symprec",required:!1,valid_types:"Float",info:"The symmetry precision used by SeeKpath for crystal symmetry refinement."}],outputs:[{name:"cif",required:!0,valid_types:"CifData",info:"The cleaned CifData node."},{name:"structure",required:!1,valid_types:"StructureData",info:"The primitive cell structure created with SeeKpath from the cleaned CifData."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:401,message:"The CifFilterCalculation step failed."},{status:402,message:"The CifSelectCalculation step failed."},{status:410,message:"The cleaned CifData contains sites with unknown species."},{status:411,message:"The cleaned CifData defines no atomic 
sites."},{status:412,message:"The cleaned CifData defines sites with attached hydrogens with incomplete positional data."},{status:413,message:"The cleaned CifData defines sites with invalid atomic occupancies."},{status:414,message:"Failed to parse a StructureData from the cleaned CifData."},{status:420,message:"SeeKpath failed to determine the primitive structure."},{status:421,message:"SeeKpath detected inconsistent symmetry operations."}]},class:"aiida_codtools.workflows.cif_clean:CifCleanWorkChain"}},console_scripts:{"aiida-codtools":"aiida_codtools.cli:cmd_root"}},commits_count:4,development_status:"stable",summaryinfo:[{colorclass:"blue",text:"Calculations",count:9},{colorclass:"brown",text:"Parsers",count:6},{colorclass:"green",text:"Workflows",count:1},{colorclass:"purple",text:"Console scripts",count:1}],pip_install_cmd:"pip install aiida-codtools",is_installable:"True"},"aiida-core":{code_home:"https://github.com/aiidateam/aiida-core",development_status:"stable",documentation_url:"https://aiida-core.readthedocs.io/",entry_point_prefix:"",package_name:"aiida",pip_url:"aiida-core",plugin_info:"https://raw.githubusercontent.com/aiidateam/aiida-core/master/setup.json",name:"aiida-core",hosted_on:"github.com",metadata:{description:"AiiDA is a workflow manager for computational science with a strong focus on provenance, performance and extensibility.",author_email:"The AiiDA team ",classifiers:["Development Status :: 5 - Production/Stable","Framework :: AiiDA","License :: OSI Approved :: MIT License","Operating System :: MacOS :: MacOS X","Operating System :: POSIX :: Linux","Programming Language :: Python","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering"],version:"2.4.0"},aiida_version:"==2.4.0",entry_points:{"aiida.calculations":{"core.arithmetic.add":{description:["`CalcJob` implementation to add two numbers using bash for testing and demonstration purposes."],spec:{inputs:[{name:"x",required:!0,valid_types:"Int, Float",info:"The left operand."},{name:"y",required:!0,valid_types:"Int, Float",info:"The right operand."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"sum",required:!0,valid_types:"Int, Float",info:"The sum of the left and right operand."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:310,message:"The output file could not be read."},{status:320,message:"The output file contains invalid output."},{status:410,message:"The sum of the operands is a negative number."}]},class:"aiida.calculations.arithmetic.add:ArithmeticAddCalculation"},"core.templatereplacer":{description:["Simple stub of a plugin that can be used to replace some text in a given template."," Can be used for many different codes, or as a starting point to develop a new plugin.",""," This simple plugin takes two node inputs, both of type Dict, with the labels"," 'parameters' and 'template'",""," You can also add other SinglefileData nodes as input, that will be copied according to"," what is written in 'template' (see below).",""," * parameters: a set of parameters that will be used for substitution.",""," * template: can contain the following parameters:",""," * input_file_template: a string with substitutions to be managed with the format()"," function of python, i.e. if you want to substitute a variable called 'varname', you write"," {varname} in the text. See http://www.python.org/dev/peps/pep-3101/ for more"," details. The replaced file will be the input file.",""," * input_file_name: a string with the file name for the input. If it is not provided, no"," file will be created.",""," * output_file_name: a string with the file name for the output. If it is not provided, no"," redirection will be done and the output will go in the scheduler output file.",""," * cmdline_params: a list of strings, to be passed as command line parameters."," Each one is substituted with the same rule of input_file_template. Optional",""," * input_through_stdin: if True, the input file name is passed via stdin. 
Default is False if missing.",""," * files_to_copy: if defined, a list of tuple pairs, with format ('link_name', 'dest_rel_path');"," for each tuple, an input link to this calculation is looked for, with link labeled 'link_label',"," and with file type 'Singlefile', and the content is copied to a remote file named 'dest_rel_path'"," Errors are raised in the input links are non-existent, or of the wrong type, or if there are"," unused input files.",""," * retrieve_temporary_files: a list of relative filepaths, that if defined, will be retrieved and"," temporarily stored in an unstored FolderData node that will be available during the"," Parser.parser_with_retrieved call under the key specified by the Parser.retrieved_temporary_folder key"],spec:{inputs:[{name:"template",required:!0,valid_types:"Dict",info:"A template for the input file."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"files",required:!1,valid_types:"RemoteData, SinglefileData",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Parameters used to replace placeholders in the template."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:301,message:"The temporary retrieved folder data node could not be accessed."},{status:305,message:"The `template` input node did not specify the key `output_file_name`."},{status:310,message:"The output file could not be read from the retrieved folder."},{status:311,message:"A temporary retrieved file could not be read from the temporary retrieved folder."},{status:320,message:"The output file contains invalid output."}]},class:"aiida.calculations.templatereplacer:TemplatereplacerCalculation"},"core.transfer":{description:["Utility to copy files from different FolderData and RemoteData nodes into a single place.",""," The final destination for these files can be either the local repository (by creating a"," new FolderData node to store them) or in the remote computer (by leaving the files in a"," new remote folder saved in a RemoteData node).",""," Only files from the local computer and from remote folders in the same external computer"," can be moved at the same time with a single instance of this CalcJob.",""," The user needs to provide three inputs:",""," * ``instructions``: a dict node specifying which files to copy from which nodes."," * ``source_nodes``: a dict of nodes, each with a unique identifier label as its key."," * ``metadata.computer``: the computer that contains the remote files and will contain"," the final RemoteData node.",""," The ``instructions`` dict must have the ``retrieve_files`` flag. The CalcJob will create a"," new folder in the remote machine (``RemoteData``) and put all the files there and will either:",""," (1) leave them there (``retrieve_files = False``) or ..."," (2) retrieve all the files and store them locally in a ``FolderData`` (``retrieve_files = True``)",""," The `instructions` dict must also contain at least one list with specifications of which files"," to copy and from where. All these lists take tuples of 3 that have the following format:",""," .. code-block:: python",""," ( source_node_key, path_to_file_in_source, path_to_file_in_target)",""," where the ``source_node_key`` has to be the respective one used when providing the node in the"," ``source_nodes`` input nodes dictionary.","",""," The two main lists to include are ``local_files`` (for files to be taken from FolderData nodes)"," and ``remote_files`` (for files to be taken from RemoteData nodes). Alternatively, files inside"," of RemoteData nodes can instead be put in the ``symlink_files`` list: the only difference is that"," files from the first list will be fully copied in the target RemoteData folder, whereas for the"," files in second list only a symlink to the original file will be created there. 
This will only"," affect the content of the final RemoteData target folder, but in both cases the full file will"," be copied back in the local target FolderData (if ``retrieve_files = True``)."],spec:{inputs:[{name:"instructions",required:!0,valid_types:"Dict",info:"A dictionary containing the `retrieve_files` flag and at least one of the file lists:`local_files`, `remote_files` and/or `symlink_files`."},{name:"source_nodes",required:!0,valid_types:"FolderData, RemoteData",info:"All the nodes that contain files referenced in the instructions."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job 
failed."},{status:150,message:"{message}"}]},class:"aiida.calculations.transfer:TransferCalculation"}},"aiida.calculations.importers":{"core.arithmetic.add":"aiida.calculations.importers.arithmetic.add:ArithmeticAddCalculationImporter"},"aiida.calculations.monitors":{"core.always_kill":"aiida.calculations.monitors.base:always_kill"},"aiida.cmdline.computer.configure":{"core.local":"aiida.transports.plugins.local:CONFIGURE_LOCAL_CMD","core.ssh":"aiida.transports.plugins.ssh:CONFIGURE_SSH_CMD"},"aiida.cmdline.data":{"core.array":"aiida.cmdline.commands.cmd_data.cmd_array:array","core.bands":"aiida.cmdline.commands.cmd_data.cmd_bands:bands","core.cif":"aiida.cmdline.commands.cmd_data.cmd_cif:cif","core.dict":"aiida.cmdline.commands.cmd_data.cmd_dict:dictionary","core.remote":"aiida.cmdline.commands.cmd_data.cmd_remote:remote","core.singlefile":"aiida.cmdline.commands.cmd_data.cmd_singlefile:singlefile","core.structure":"aiida.cmdline.commands.cmd_data.cmd_structure:structure","core.trajectory":"aiida.cmdline.commands.cmd_data.cmd_trajectory:trajectory","core.upf":"aiida.cmdline.commands.cmd_data.cmd_upf:upf"},"aiida.cmdline.data.structure.import":{},"aiida.data":{"core.array":"aiida.orm.nodes.data.array.array:ArrayData","core.array.bands":"aiida.orm.nodes.data.array.bands:BandsData","core.array.kpoints":"aiida.orm.nodes.data.array.kpoints:KpointsData","core.array.projection":"aiida.orm.nodes.data.array.projection:ProjectionData","core.array.trajectory":"aiida.orm.nodes.data.array.trajectory:TrajectoryData","core.array.xy":"aiida.orm.nodes.data.array.xy:XyData","core.base":"aiida.orm.nodes.data:BaseType","core.bool":"aiida.orm.nodes.data.bool:Bool","core.cif":"aiida.orm.nodes.data.cif:CifData","core.code":"aiida.orm.nodes.data.code.legacy:Code","core.code.containerized":"aiida.orm.nodes.data.code.containerized:ContainerizedCode","core.code.installed":"aiida.orm.nodes.data.code.installed:InstalledCode","core.code.portable":"aiida.orm.nodes.data.code.portable:PortableCode","core.dict":"aiida.orm.nodes.data.dict:Dict","core.enum":"aiida.orm.nodes.data.enum:EnumData","core.float":"aiida.orm.nodes.data.float:Float","core.folder":"aiida.orm.nodes.data.folder:FolderData","core.int":"aiida.orm.nodes.data.int:Int","core.jsonable":"aiida.orm.nodes.data.jsonable:JsonableData","core.list":"aiida.orm.nodes.data.list:List","core.numeric":"aiida.orm.nodes.data.numeric:NumericType","core.orbital":"aiida.orm.nodes.data.orbital:OrbitalData","core.remote":"aiida.orm.nodes.data.remote.base:RemoteData","core.remote.stash":"aiida.orm.nodes.data.remote.stash.base:RemoteStashData","core.remote.stash.folder":"aiida.orm.nodes.data.remote.stash.folder:RemoteStashFolderData","core.singlefile":"aiida.orm.nodes.data.singlefile:SinglefileData","core.str":"aiida.orm.nodes.data.str:Str","core.structure":"aiida.orm.nodes.data.structure:StructureData","core.upf":"aiida.orm.nodes.data.upf:UpfData"},"aiida.groups":{core:"aiida.orm.groups:Group","core.auto":"aiida.orm.groups:AutoGroup","core.import":"aiida.orm.groups:ImportGroup","core.upf":"aiida.orm.groups:UpfFamily"},"aiida.node":{data:"aiida.orm.nodes.data.data:Data",process:"aiida.orm.nodes.process.process:ProcessNode","process.calculation":"aiida.orm.nodes.process.calculation.calculation:CalculationNode","process.calculation.calcfunction":"aiida.orm.nodes.process.calculation.calcfunction:CalcFunctionNode","process.calculation.calcjob":"aiida.orm.nodes.process.calculation.calcjob:CalcJobNode","process.workflow":"aiida.orm.nodes.process.workflow.workflow:WorkflowNode","process.
workflow.workchain":"aiida.orm.nodes.process.workflow.workchain:WorkChainNode","process.workflow.workfunction":"aiida.orm.nodes.process.workflow.workfunction:WorkFunctionNode"},"aiida.parsers":{"core.arithmetic.add":"aiida.parsers.plugins.arithmetic.add:ArithmeticAddParser","core.templatereplacer":"aiida.parsers.plugins.templatereplacer.parser:TemplatereplacerParser"},"aiida.schedulers":{"core.direct":"aiida.schedulers.plugins.direct:DirectScheduler","core.lsf":"aiida.schedulers.plugins.lsf:LsfScheduler","core.pbspro":"aiida.schedulers.plugins.pbspro:PbsproScheduler","core.sge":"aiida.schedulers.plugins.sge:SgeScheduler","core.slurm":"aiida.schedulers.plugins.slurm:SlurmScheduler","core.torque":"aiida.schedulers.plugins.torque:TorqueScheduler"},"aiida.storage":{"core.psql_dos":"aiida.storage.psql_dos.backend:PsqlDosBackend","core.sqlite_temp":"aiida.storage.sqlite_temp.backend:SqliteTempBackend","core.sqlite_zip":"aiida.storage.sqlite_zip.backend:SqliteZipBackend"},"aiida.tools.calculations":{},"aiida.tools.data.orbitals":{"core.orbital":"aiida.tools.data.orbital.orbital:Orbital","core.realhydrogen":"aiida.tools.data.orbital.realhydrogen:RealhydrogenOrbital"},"aiida.tools.dbexporters":{},"aiida.tools.dbimporters":{"core.cod":"aiida.tools.dbimporters.plugins.cod:CodDbImporter","core.icsd":"aiida.tools.dbimporters.plugins.icsd:IcsdDbImporter","core.materialsproject":"aiida.tools.dbimporters.plugins.materialsproject:MaterialsProjectImporter","core.mpds":"aiida.tools.dbimporters.plugins.mpds:MpdsDbImporter","core.mpod":"aiida.tools.dbimporters.plugins.mpod:MpodDbImporter","core.nninc":"aiida.tools.dbimporters.plugins.nninc:NnincDbImporter","core.oqmd":"aiida.tools.dbimporters.plugins.oqmd:OqmdDbImporter","core.pcod":"aiida.tools.dbimporters.plugins.pcod:PcodDbImporter","core.tcod":"aiida.tools.dbimporters.plugins.tcod:TcodDbImporter"},"aiida.transports":{"core.local":"aiida.transports.plugins.local:LocalTransport","core.ssh":"aiida.transports.plugins.ssh:SshTransport"},"aiida.workflows":{"core.arithmetic.add_multiply":{description:["Add two numbers and multiply it with a third."],spec:{inputs:[{name:"x",required:!0,valid_types:"Data",info:""},{name:"y",required:!0,valid_types:"Data",info:""},{name:"z",required:!0,valid_types:"Data",info:""},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida.workflows.arithmetic.add_multiply:add_multiply"},"core.arithmetic.multiply_add":{description:["WorkChain to multiply two numbers and add a third, for testing and demonstration purposes."],spec:{inputs:[{name:"code",required:!0,valid_types:"AbstractCode",info:""},{name:"x",required:!0,valid_types:"Int",info:""},{name:"y",required:!0,valid_types:"Int",info:""},{name:"z",required:!0,valid_types:"Int",info:""},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"result",required:!0,valid_types:"Int",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:400,message:"The result is a negative 
number."}]},class:"aiida.workflows.arithmetic.multiply_add:MultiplyAddWorkChain"}},console_scripts:{runaiida:"aiida.cmdline.commands.cmd_run:run",verdi:"aiida.cmdline.commands.cmd_verdi:verdi"}},commits_count:336,summaryinfo:[{colorclass:"blue",text:"Calculations",count:3},{colorclass:"brown",text:"Parsers",count:2},{colorclass:"red",text:"Data",count:29},{colorclass:"green",text:"Workflows",count:2},{colorclass:"purple",text:"Console scripts",count:2},{colorclass:"orange",text:"Other (Calculations importers, Calculations monitors, Cmdline computer configure, ...)",count:47}],pip_install_cmd:"pip install aiida-core",is_installable:"True"},"aiida-cp2k":{code_home:"https://github.com/cp2k/aiida-cp2k",entry_point_prefix:"cp2k",pip_url:"aiida-cp2k",plugin_info:"https://raw.githubusercontent.com/cp2k/aiida-cp2k/master/setup.json",name:"aiida-cp2k",package_name:"aiida_cp2k",hosted_on:"github.com",metadata:{description:"The official AiiDA plugin for CP2K.",author:"The AiiDA team",classifiers:["Development Status :: 5 - Production/Stable","Framework :: AiiDA","License :: OSI Approved :: MIT License","Operating System :: MacOS :: MacOS X","Operating System :: POSIX :: Linux","Programming Language :: Python :: 3"],version:"2.0.0"},aiida_version:">=2.0.0,<3.0.0",entry_points:{"aiida.calculations":{cp2k:{description:["This is a Cp2kCalculation, subclass of JobCalculation, to prepare input for an ab-initio CP2K calculation.",""," For information on CP2K, refer to: https://www.cp2k.org."],spec:{inputs:[{name:"parameters",required:!0,valid_types:"Dict",info:"The input parameters."},{name:"basissets",required:!1,valid_types:"",info:"A dictionary of basissets to be used in the calculations: key is the atomic symbol, value is either a single basisset or a list of basissets. If multiple basissets for a single symbol are passed, it is mandatory to specify a KIND section with a BASIS_SET keyword matching the names (or aliases) of the basissets."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"file",required:!1,valid_types:"SinglefileData, StructureData",info:"Additional input files."},{name:"kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Input kpoint mesh."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parent_calc_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Working directory of a previously ran calculation to restart from."},{name:"pseudos",required:!1,valid_types:"",info:"A dictionary of pseudopotentials to be used in the calculations: key is the atomic symbol, value is either a single pseudopotential or a list of pseudopotentials. If multiple pseudos for a single symbol are passed, it is mandatory to specify a KIND section with a PSEUDOPOTENTIAL keyword matching the names (or aliases) of the pseudopotentials."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. 
The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Optional input parameters."},{name:"structure",required:!1,valid_types:"StructureData, NoneType",info:"The main input structure."}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"The output dictionary containing results of the calculation."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"output_bands",required:!1,valid_types:"BandsData",info:"Computed electronic band structure."},{name:"output_structure",required:!1,valid_types:"StructureData",info:"The relaxed output structure."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:200,message:"The retrieved folder data node could not be accessed."},{status:210,message:"The retrieved folder did not contain the required output file."},{status:301,message:"The output file could not be read."},{status:302,message:"The output file could not be parsed."},{status:303,message:"The output file was incomplete."},{status:304,message:'The output file contains the word "ABORT".'},{status:312,message:"The output structure could not be parsed."},{status:350,message:"The parser raised an unexpected exception."},{status:400,message:"The calculation stopped prematurely because it ran out of walltime."},{status:500,message:"The ionic minimization cycle did not converge for the given thresholds."},{status:501,message:"The maximum number of optimization steps reached."}]},class:"aiida_cp2k.calculations:Cp2kCalculation"}},"aiida.parsers":{cp2k_advanced_parser:"aiida_cp2k.parsers:Cp2kAdvancedParser",cp2k_base_parser:"aiida_cp2k.parsers:Cp2kBaseParser",cp2k_tools_parser:"aiida_cp2k.parsers:Cp2kToolsParser"},"aiida.workflows":{"cp2k.base":{description:["Workchain to run a CP2K calculation with automated error handling and restarts."],spec:{inputs:[{name:"cp2k",required:!0,valid_types:"Data",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"handler_overrides",required:!1,valid_types:"Dict, NoneType",info:"Mapping where keys are process handler names and the values 
are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration."},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"The output dictionary containing results of the calculation."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"final_input_parameters",required:!1,valid_types:"Dict",info:"The input parameters used for the final calculation."},{name:"output_bands",required:!1,valid_types:"BandsData",info:"Computed electronic band structure."},{name:"output_structure",required:!1,valid_types:"StructureData",info:"The relaxed output structure."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:300,message:"The calculation failed with an unidentified unrecoverable error."},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:310,message:"The calculation failed with a known unrecoverable error."},{status:400,message:"The calculation didn't produce any data to restart from."},{status:401,message:"The maximum number of iterations was exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."}]},class:"aiida_cp2k.workchains:Cp2kBaseWorkChain"}}},commits_count:18,development_status:"stable",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:3},{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install aiida-cp2k",is_installable:"True"},"aiida-crystal-dft":{code_home:"https://github.com/tilde-lab/aiida-crystal-dft",development_status:"beta",documentation_url:"https://github.com/tilde-lab/aiida-crystal-dft",entry_point_prefix:"crystal_dft",pip_url:"git+https://github.com/tilde-lab/aiida-crystal-dft",name:"aiida-crystal-dft",package_name:"aiida_crystal_dft",hosted_on:"github.com",metadata:{description:`Yet another AiiDA plugin for CRYSTAL code, mainly intended for use with the cloud infrastructures +(currently, MPDS)`,classifiers:["Development Status :: 4 - Beta","Framework :: AiiDA","License :: OSI Approved :: MIT License","Intended Audience :: Science/Research","Operating System :: OS Independent","Programming Language :: Python","Programming Language :: Python :: 3","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Programming Language :: Python :: 3.10","Topic :: Scientific/Engineering","Topic :: Scientific/Engineering :: Chemistry","Topic :: Scientific/Engineering :: Physics","Topic :: Scientific/Engineering :: 
Information Analysis"],author:"Andrey Sobolev, based on aiida-crystal17 plugin by Chris Sewell",author_email:"as@tilde.pro"},aiida_version:">=2.0.2",entry_points:{"aiida.data":{"crystal_dft.basis":"aiida_crystal_dft.data.basis:CrystalBasisData","crystal_dft.basis_family":"aiida_crystal_dft.data.basis_family:CrystalBasisFamilyData"},"aiida.calculations":{"crystal_dft.serial":{description:["No description available"],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:""},{name:"parameters",required:!0,valid_types:"Dict",info:""},{name:"structure",required:!0,valid_types:"StructureData",info:""},{name:"basis",required:!1,valid_types:"CrystalBasisData",info:""},{name:"basis_family",required:!1,valid_types:"CrystalBasisFamilyData, NoneType",info:""},{name:"guess_oxistates",required:!1,valid_types:"Bool, NoneType",info:""},{name:"high_spin_preferred",required:!1,valid_types:"Bool, NoneType",info:""},{name:"is_magnetic",required:!1,valid_types:"Bool, NoneType",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"spinlock_steps",required:!1,valid_types:"Int, NoneType",info:""},{name:"use_oxistates",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"oxidation_states",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"output_structure",required:!1,valid_types:"StructureData",info:""},{name:"output_trajectory",required:!1,valid_types:"TrajectoryData",info:""},{name:"output_wavefunction",required:!1,valid_types:"SinglefileData",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"SCF calculation not converged"},{status:301,message:"Geometry optimization failed"},{status:302,message:"Unit cell not neutral"},{status:303,message:"Basis set linearly dependent"},{status:304,message:"Neighbour list too large"},{status:305,message:"No G-vectors left"},{status:306,message:"Collapsed geometry"},{status:307,message:"Closed shell run - spin polarization not allowed"},{status:308,message:"Parameters for model hessian not defined"},{status:309,message:"Fermi energy not in interval"},{status:310,message:"Insufficient indices for Madelung sums"},{status:350,message:"Internal memory error"},{status:360,message:"Inadequate elastic calculation: additional optimization needed"},{status:400,message:"Unknown error"},{status:401,message:"The retrieved folder data node could not be accessed"}]},class:"aiida_crystal_dft.calculations.serial:CrystalSerialCalculation"},"crystal_dft.parallel":{description:["No description available"],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:""},{name:"parameters",required:!0,valid_types:"Dict",info:""},{name:"structure",required:!0,valid_types:"StructureData",info:""},{name:"basis",required:!1,valid_types:"CrystalBasisData",info:""},{name:"basis_family",required:!1,valid_types:"CrystalBasisFamilyData, NoneType",info:""},{name:"guess_oxistates",required:!1,valid_types:"Bool, NoneType",info:""},{name:"high_spin_preferred",required:!1,valid_types:"Bool, NoneType",info:""},{name:"is_magnetic",required:!1,valid_types:"Bool, NoneType",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"spinlock_steps",required:!1,valid_types:"Int, NoneType",info:""},{name:"use_oxistates",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"oxidation_states",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"output_structure",required:!1,valid_types:"StructureData",info:""},{name:"output_trajectory",required:!1,valid_types:"TrajectoryData",info:""},{name:"output_wavefunction",required:!1,valid_types:"SinglefileData",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"SCF calculation not converged"},{status:301,message:"Geometry optimization failed"},{status:302,message:"Unit cell not neutral"},{status:303,message:"Basis set linearly dependent"},{status:304,message:"Neighbour list too large"},{status:305,message:"No G-vectors left"},{status:306,message:"Collapsed geometry"},{status:307,message:"Closed shell run - spin polarization not allowed"},{status:308,message:"Parameters for model hessian not defined"},{status:309,message:"Fermi energy not in interval"},{status:310,message:"Insufficient indices for Madelung sums"},{status:350,message:"Internal memory error"},{status:360,message:"Inadequate elastic calculation: additional optimization needed"},{status:400,message:"Unknown error"},{status:401,message:"The retrieved folder data node could not be accessed"}]},class:"aiida_crystal_dft.calculations.parallel:CrystalParallelCalculation"},"crystal_dft.properties":{description:["AiiDA calculation plugin wrapping the properties executable."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:""},{name:"parameters",required:!0,valid_types:"Dict",info:""},{name:"wavefunction",required:!0,valid_types:"SinglefileData",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. 
The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"structure",required:!1,valid_types:"StructureData, NoneType",info:""}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"output_bands",required:!1,valid_types:"BandsData",info:""},{name:"output_bands_down",required:!1,valid_types:"BandsData",info:""},{name:"output_dos",required:!1,valid_types:"ArrayData",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The retrieved folder data node could not be accessed"},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"}]},class:"aiida_crystal_dft.calculations.properties:PropertiesCalculation"}},"aiida.parsers":{crystal_dft:"aiida_crystal_dft.parsers.cry_pycrystal:CrystalParser","crystal_dft.properties":"aiida_crystal_dft.parsers.properties:PropertiesParser"},"aiida.workflows":{"crystal_dft.base":{description:["Run CRYSTAL calculation"],spec:{inputs:[{name:"basis_family",required:!0,valid_types:"CrystalBasisFamilyData",info:""},{name:"code",required:!0,valid_types:"Code",info:""},{name:"options",required:!0,valid_types:"Dict",info:"Calculation options"},{name:"parameters",required:!0,valid_types:"Dict",info:""},{name:"structure",required:!0,valid_types:"StructureData",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool, NoneType",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"restart_params",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"output_parameters",required:!1,valid_types:"Dict",info:""},{name:"output_structure",required:!1,valid_types:"StructureData",info:""},{name:"output_trajectory",required:!1,valid_types:"TrajectoryData",info:""},{name:"output_wavefunction",required:!1,valid_types:"SinglefileData",info:""},{name:"oxidation_states",required:!1,valid_types:"Dict",info:""},{name:"primitive_structure",required:!1,valid_types:"StructureData",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:300,message:"CRYSTAL error"},{status:400,message:"Unknown 
error"}]},class:"aiida_crystal_dft.workflows.base:BaseCrystalWorkChain"}},"aiida.cmdline.data":{crystal_dft:"aiida_crystal_dft.cli.basis:basis_set"}},commits_count:19,summaryinfo:[{colorclass:"blue",text:"Calculations",count:3},{colorclass:"brown",text:"Parsers",count:2},{colorclass:"red",text:"Data",count:2},{colorclass:"green",text:"Workflows",count:1},{colorclass:"orange",text:"Other (Data commands)",count:1}],pip_install_cmd:"pip install git+https://github.com/tilde-lab/aiida-crystal-dft",is_installable:"True"},"aiida-crystal17":{code_home:"https://github.com/aiidaplugins/aiida-crystal17",development_status:"beta",documentation_url:"https://aiida-crystal17.readthedocs.io",entry_point_prefix:"crystal17",pip_url:"aiida-crystal17",plugin_info:"https://raw.githubusercontent.com/aiidaplugins/aiida-crystal17/master/setup.json",name:"aiida-crystal17",package_name:"aiida_crystal17",hosted_on:"github.com",metadata:{description:"AiiDA plugin for running the CRYSTAL17 code",author:"Chris Sewell",author_email:"chrisj_sewell@hotmail.com",license:"MIT",home_page:"https://github.com/chrisjsewell/aiida-crystal17",classifiers:["Framework :: AiiDA","Programming Language :: Python","Programming Language :: Python :: 2.7","Programming Language :: Python :: 3.6","Topic :: Scientific/Engineering :: Chemistry","Topic :: Scientific/Engineering :: Physics"],version:"0.11.0"},aiida_version:">=1.4.0,<2.0.0",entry_points:{"aiida.calculations":{"crystal17.basic":{description:["AiiDA calculation plugin to run the crystal17 executable,"," by supplying a normal .d12 input file and (optional) .gui file"],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"input_file",required:!0,valid_types:"SinglefileData",info:"the input .d12 file content."},{name:"input_external",required:!1,valid_types:"SinglefileData",info:"optional input fort.34 (gui) file content (for use with EXTERNAL keyword)."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"results",required:!0,valid_types:"Dict",info:"the data extracted from the main output file"},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"structure",required:!1,valid_types:"StructureData",info:"the structure output from the calculation"},{name:"symmetry",required:!1,valid_types:"SymmetryData",info:"the symmetry data from the calculation"}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:200,message:"The retrieved folder data node could not be accessed."},{status:210,message:"the main (stdout) output file was not found"},{status:211,message:"the temporary retrieved folder was not found"},{status:300,message:"An error was flagged trying to parse the crystal exec stdout file"},{status:301,message:"An error occurred parsing the 'opta'/'optc' geometry files"},{status:302,message:"The crystal exec stdout file denoted that the run was a testgeom"},{status:350,message:"the input file could not be read by CRYSTAL"},{status:351,message:"CRYSTAL could not find the required wavefunction file"},{status:352,message:"Possibly due to erroneous CHEMOD basis set modification"},{status:353,message:"Possibly due to erroneous CHEMOD basis set modification"},{status:354,message:"Error in CHEMOD basis set modification"},{status:400,message:"The calculation stopped prematurely because it ran out of walltime."},{status:401,message:"The calculation stopped prematurely because it ran out of memory."},{status:402,message:"The calculation stopped prematurely because it ran out of virtual memory."},{status:411,message:"SCF convergence did not finalise (usually due to reaching step limit)"},{status:412,message:"Geometry convergence did not finalise (usually due to reaching step limit)"},{status:413,message:"an error encountered usually during geometry optimisation"},{status:414,message:"an error was encountered during an SCF computation"},{status:415,message:"an unknown error was encountered, causing the MPI to abort"},{status:499,message:"The main crystal output file flagged an unhandled error"},{status:510,message:"inconsistency in the input and output symmetry"},{status:520,message:"primitive symmops were not found in the output file"}]},class:"aiida_crystal17.calculations.cry_basic:CryBasicCalculation"},"crystal17.doss":{description:["AiiDA calculation plugin to run the ``properties`` executable,"," for DOSS calculations."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"parameters",required:!0,valid_types:"Dict",info:"the input parameters to create the properties input file."},{name:"wf_folder",required:!0,valid_types:"FolderData, RemoteData, SinglefileData",info:"the folder containing the wavefunction fort.9 file"},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"results",required:!0,valid_types:"Dict",info:"Summary Data extracted from the output 
file(s)"},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"arrays",required:!1,valid_types:"ArrayData",info:"energies and DoS arrays"},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:200,message:"The retrieved folder data node could not be accessed."},{status:210,message:"the main (stdout) output file was not found"},{status:211,message:"the temporary retrieved folder was not found"},{status:300,message:"An error was flagged trying to parse the crystal exec stdout file"},{status:350,message:"the input file could not be read by CRYSTAL"},{status:351,message:"CRYSTAL could not find the required wavefunction file"},{status:352,message:"Possibly due to erroneous CHEMOD basis set modification"},{status:352,message:"parser could not find the output isovalue (fort.25) file"},{status:353,message:"Possibly due to erroneous CHEMOD basis set modification"},{status:353,message:"error parsing output isovalue (fort.25) file"},{status:354,message:"Error in CHEMOD basis set modification"},{status:400,message:"The calculation stopped prematurely because it ran out of walltime."},{status:401,message:"The calculation stopped prematurely because it ran out of memory."},{status:402,message:"The calculation stopped prematurely because it ran out of virtual memory."},{status:413,message:"an error encountered usually during geometry optimisation"},{status:414,message:"an error was encountered during an SCF computation"},{status:415,message:"an unknown error was encountered, causing the MPI to abort"},{status:499,message:"The main crystal output file flagged an unhandled error"}]},class:"aiida_crystal17.calculations.prop_doss:CryDossCalculation"},"crystal17.ech3":{description:["AiiDA calculation plugin to run the ``properties`` executable, for 3D charge density (ECH3)."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"parameters",required:!0,valid_types:"Dict",info:"the input parameters to create the properties input file."},{name:"wf_folder",required:!0,valid_types:"FolderData, RemoteData, SinglefileData",info:"the folder containing the wavefunction fort.9 file"},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"charge",required:!0,valid_types:"GaussianCube",info:"The charge density cube"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"results",required:!0,valid_types:"Dict",info:"Summary Data extracted from the output file(s)"},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"spin",required:!1,valid_types:"GaussianCube",info:"The spin density cube"}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:200,message:"The retrieved folder data node could not be accessed."},{status:210,message:"the main (stdout) output file was not found"},{status:211,message:"the temporary retrieved folder was not found"},{status:300,message:"An error was flagged trying to parse the crystal exec stdout file"},{status:350,message:"the input file could not be read by CRYSTAL"},{status:351,message:"CRYSTAL could not find the required wavefunction file"},{status:352,message:"Possibly due to erroneous CHEMOD basis set modification"},{status:352,message:"parser could not find the output density file"},{status:353,message:"Possibly due to erroneous CHEMOD basis set modification"},{status:353,message:"error parsing output density file"},{status:354,message:"Error in CHEMOD basis set modification"},{status:400,message:"The calculation stopped prematurely because it ran out of walltime."},{status:401,message:"The calculation stopped prematurely because it ran out of memory."},{status:402,message:"The calculation stopped prematurely because it ran out of virtual memory."},{status:413,message:"an error encountered usually during geometry optimisation"},{status:414,message:"an error was encountered during an SCF computation"},{status:415,message:"an unknown error was encountered, causing the MPI to abort"},{status:499,message:"The main crystal output file flagged an unhandled error"}]},class:"aiida_crystal17.calculations.prop_ech3:CryEch3Calculation"},"crystal17.main":{description:["AiiDA calculation plugin to run the crystal17 executable,"," by supplying aiida nodes, with data sufficient to create the"," .d12 input file and .gui file"],spec:{inputs:[{name:"basissets",required:!0,valid_types:"BasisSetData",info:"Use a node for the basis set of one of the elements in the structure. You have to pass an additional parameter ('element') specifying the atomic element symbol for which you want to use this basis set."},{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"parameters",required:!0,valid_types:"CryInputParamsData",info:"the input parameters to create the .d12 file content."},{name:"structure",required:!0,valid_types:"StructureData",info:"structure used to construct the input fort.34 (gui) file"},{name:"kinds",required:!1,valid_types:"KindData",info:"additional structure kind specific data (e.g. 
initial spin)"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"symmetry",required:!1,valid_types:"SymmetryData",info:"the symmetry of the structure, used to construct the input .gui file (fort.34)"},{name:"wf_folder",required:!1,valid_types:"RemoteData",info:"An optional working directory, of a previously completed calculation, containing a fort.9 wavefunction file to restart from"}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"results",required:!0,valid_types:"Dict",info:"the data extracted from the main output file"},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"optimisation",required:!1,valid_types:"TrajectoryData",info:"atomic configurations, for each optimisation step"},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"structure",required:!1,valid_types:"StructureData",info:"the structure output from the calculation"},{name:"symmetry",required:!1,valid_types:"SymmetryData",info:"the symmetry data from the calculation"}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:200,message:"The retrieved folder data node could not be accessed."},{status:210,message:"the main (stdout) output file was not found"},{status:211,message:"the temporary retrieved folder was not found"},{status:300,message:"An error was flagged trying to parse the crystal exec stdout file"},{status:301,message:"An error occurred parsing the 'opta'/'optc' geometry files"},{status:302,message:"The crystal exec stdout file denoted that the run was a testgeom"},{status:350,message:"the input file could not be read by CRYSTAL"},{status:351,message:"CRYSTAL could not find the required wavefunction file"},{status:352,message:"Possibly due to erroneous CHEMOD basis set modification"},{status:353,message:"Possibly due to erroneous CHEMOD basis set modification"},{status:354,message:"Error in CHEMOD basis set modification"},{status:400,message:"The calculation stopped prematurely because it ran out of walltime."},{status:401,message:"The calculation stopped prematurely because it ran out of memory."},{status:402,message:"The calculation stopped prematurely because it ran out of virtual memory."},{status:411,message:"SCF convergence did not finalise (usually due to reaching step limit)"},{status:412,message:"Geometry convergence did not finalise (usually due to reaching step limit)"},{status:413,message:"an error encountered usually during geometry optimisation"},{status:414,message:"an error was encountered during an SCF computation"},{status:415,message:"an unknown error was encountered, causing the MPI to abort"},{status:499,message:"The main crystal output file flagged an unhandled error"},{status:510,message:"inconsistency in the input and output symmetry"},{status:520,message:"primitive symmops were not found in the 
output file"}]},class:"aiida_crystal17.calculations.cry_main:CryMainCalculation"},"crystal17.newk":{description:["AiiDA calculation plugin to run the properties17 executable,"," for NEWK calculations (to return the fermi energy)"],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"parameters",required:!0,valid_types:"Dict",info:"the input parameters to create the properties input file."},{name:"wf_folder",required:!0,valid_types:"FolderData, RemoteData, SinglefileData",info:"the folder containing the wavefunction fort.9 file"},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"results",required:!0,valid_types:"Dict",info:"Summary Data extracted from the output file(s)"},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:200,message:"The retrieved folder data node could not be accessed."},{status:210,message:"the main (stdout) output file was not found"},{status:211,message:"the temporary retrieved folder was not found"},{status:300,message:"An error was flagged trying to parse the crystal exec stdout file"},{status:350,message:"the input file could not be read by CRYSTAL"},{status:351,message:"CRYSTAL could not find the required wavefunction file"},{status:352,message:"Possibly due to erroneous CHEMOD basis set modification"},{status:353,message:"Possibly due to erroneous CHEMOD basis set modification"},{status:354,message:"Error in CHEMOD basis set modification"},{status:400,message:"The calculation stopped prematurely because it ran out of walltime."},{status:401,message:"The calculation stopped prematurely because it ran out of memory."},{status:402,message:"The calculation stopped prematurely because it ran out of virtual memory."},{status:413,message:"an error encountered usually during geometry optimisation"},{status:414,message:"an error was encountered during an SCF computation"},{status:415,message:"an unknown error was encountered, causing the MPI to abort"},{status:499,message:"The main crystal output file flagged an unhandled error"}]},class:"aiida_crystal17.calculations.prop_newk:CryNewkCalculation"},"crystal17.ppan":{description:["AiiDA calculation plugin to run the ``properties`` executable,"," for PPAN (Mulliken population analysis) calculations."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"parameters",required:!0,valid_types:"Dict",info:"the input parameters to create the properties input file."},{name:"wf_folder",required:!0,valid_types:"FolderData, RemoteData, SinglefileData",info:"the folder containing the wavefunction fort.9 
file"},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"results",required:!0,valid_types:"Dict",info:"Summary Data extracted from the output file(s)"},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:200,message:"The retrieved folder data node could not be accessed."},{status:210,message:"the main (stdout) output file was not found"},{status:211,message:"the temporary retrieved folder was not found"},{status:300,message:"An error was flagged trying to parse the crystal exec stdout file"},{status:350,message:"the input file could not be read by CRYSTAL"},{status:351,message:"CRYSTAL could not find the required wavefunction file"},{status:352,message:"Possibly due to erroneous CHEMOD basis set modification"},{status:352,message:"parser could not find the output PPAN.dat file"},{status:353,message:"Possibly due to erroneous CHEMOD basis set modification"},{status:353,message:"error parsing output PPAN.dat file"},{status:354,message:"Error in CHEMOD basis set modification"},{status:400,message:"The calculation stopped prematurely because it ran out of walltime."},{status:401,message:"The calculation stopped prematurely because it ran out of memory."},{status:402,message:"The calculation stopped prematurely because it ran out of virtual memory."},{status:413,message:"an error encountered usually during geometry optimisation"},{status:414,message:"an error was encountered during an SCF computation"},{status:415,message:"an unknown error was encountered, causing the MPI to abort"},{status:499,message:"The main crystal output file flagged an unhandled 
error"}]},class:"aiida_crystal17.calculations.prop_ppan:CryPpanCalculation"}},"aiida.cmdline.data":{"crystal17.basis":"aiida_crystal17.cmndline.basis_set:basisset","crystal17.parse":"aiida_crystal17.cmndline.cmd_parser:parse","crystal17.symmetry":"aiida_crystal17.cmndline.symmetry:symmetry"},"aiida.data":{"crystal17.basisset":"aiida_crystal17.data.basis_set:BasisSetData","crystal17.gcube":"aiida_crystal17.data.gcube:GaussianCube","crystal17.kinds":"aiida_crystal17.data.kinds:KindData","crystal17.parameters":"aiida_crystal17.data.input_params:CryInputParamsData","crystal17.symmetry":"aiida_crystal17.data.symmetry:SymmetryData"},"aiida.groups":{"crystal17.basisset":"aiida_crystal17.data.basis_set:BasisSetFamily"},"aiida.parsers":{"crystal17.doss":"aiida_crystal17.parsers.cry_doss:CryDossParser","crystal17.ech3":"aiida_crystal17.parsers.cry_ech3:CryEch3Parser","crystal17.main":"aiida_crystal17.parsers.cry_main:CryMainParser","crystal17.newk":"aiida_crystal17.parsers.cry_newk:CryNewkParser","crystal17.ppan":"aiida_crystal17.parsers.cry_ppan:CryPpanParser"},"aiida.workflows":{"crystal17.main.base":{description:["Workchain to run a standard CRYSTAL17 calculation,"," with automated error handling and restarts."],spec:{inputs:[{name:"cry",required:!0,valid_types:"",info:""},{name:"basis_family",required:!1,valid_types:"Str",info:"An alternative to specifying the basis sets manually: one can specify the name of an existing basis set family and the work chain will generate the basis sets automatically based on the input structure."},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"handler_overrides",required:!1,valid_types:"Dict",info:"Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated."},{name:"kpoints_distance",required:!1,valid_types:"Float",info:"The minimum desired distance in 1/Å between k-points in reciprocal space. The explicit k-points will be generated automatically by the input structure, and will replace the SHRINK IS value in the input parameters.Note: This methods assumes the PRIMITIVE unit cell is provided"},{name:"kpoints_force_parity",required:!1,valid_types:"Bool",info:"Optional input when constructing the k-points based on a desired `kpoints_distance`. 
Setting this to `True` will force the k-point mesh to have an even number of points along each lattice vector except for any non-periodic directions."},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"results",required:!0,valid_types:"Dict",info:"the data extracted from the main output file"},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"structure",required:!1,valid_types:"StructureData",info:"the structure output from the calculation"},{name:"symmetry",required:!1,valid_types:"SymmetryData",info:"the symmetry data from the calculation"}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:201,message:"The parameters could not be validated against the jsonschema."},{status:202,message:"The explicit `basis_sets` or `basis_family` could not be used to get the necessary basis sets."},{status:204,message:"The `metadata.options` did not specify both `resources.num_machines` and `max_wallclock_seconds`."},{status:300,message:"The calculation failed with an unrecoverable error."},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:320,message:"The initialization calculation failed."},{status:401,message:"The maximum number of iterations was exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."}]},class:"aiida_crystal17.workflows.crystal_main.base:CryMainBaseWorkChain"},"crystal17.properties":{description:["A WorkChain to compute properties of a structure, using CRYSTAL.",""," Either a pre-computed wavefunction (fort.9) file,"," or inputs for a CryMainCalculation, should be supplied."," Inputs for property calculations can then be added"," (currently available; doss, ech3)."],spec:{inputs:[{name:"check_remote",required:!1,valid_types:"Bool",info:"If a RemoteData wf_folder is input, check it contains the wavefunction file, before launching calculations. 
Note, this will fail if the remote computer is not immediately available"},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation will be cleaned at the end of execution."},{name:"doss",required:!1,valid_types:"",info:""},{name:"ech3",required:!1,valid_types:"",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"ppan",required:!1,valid_types:"",info:""},{name:"scf",required:!1,valid_types:"",info:""},{name:"test_run",required:!1,valid_types:"Bool",info:"break off the workchain before submitting a calculation"},{name:"wf_folder",required:!1,valid_types:"FolderData, RemoteData, SinglefileData",info:"the folder containing the wavefunction fort.9 file"}],outputs:[{name:"doss",required:!1,valid_types:"",info:""},{name:"ech3",required:!1,valid_types:"",info:""},{name:"ppan",required:!1,valid_types:"",info:""},{name:"scf",required:!1,valid_types:"",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:200,message:"Workchain ended before submitting calculation."},{status:201,message:"Neither a wf_folder nor scf calculation was supplied."},{status:202,message:"No property calculation inputs were supplied."},{status:203,message:"The supplied folder does contain the wavefunction file."},{status:210,message:"The SCF calculation submission failed."},{status:301,message:"The SCF calculation failed."},{status:302,message:"One or more property calculations failed."}]},class:"aiida_crystal17.workflows.crystal_props.base:CryPropertiesWorkChain"},"crystal17.sym3d":{description:["modify an AiiDa structure instance and compute its symmetry",""," Inequivalent atomic sites are dictated by atom kinds"],spec:{inputs:[{name:"settings",required:!0,valid_types:"Dict",info:""},{name:"cif",required:!1,valid_types:"CifData",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"structure",required:!1,valid_types:"StructureData",info:""}],outputs:[{name:"symmetry",required:!0,valid_types:"SymmetryData",info:""},{name:"structure",required:!1,valid_types:"StructureData",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:300,message:"one of either a structure or cif input must be supplied"},{status:301,message:'the supplied structure must be 3D (i.e. 
have all dimensions pbc=True)"'},{status:302,message:"idealize can only be used when standardize=True"},{status:303,message:"the kind names supplied are not compatible with the structure"},{status:304,message:"error creating new structure"},{status:305,message:"error computing symmetry operations"}]},class:"aiida_crystal17.workflows.symmetrise_3d_struct:Symmetrise3DStructure"}},console_scripts:{mock_crystal17:"aiida_crystal17.tests.mock_crystal17:main",mock_properties17:"aiida_crystal17.tests.mock_properties17:main"}},commits_count:0,summaryinfo:[{colorclass:"blue",text:"Calculations",count:6},{colorclass:"brown",text:"Parsers",count:5},{colorclass:"red",text:"Data",count:5},{colorclass:"green",text:"Workflows",count:3},{colorclass:"purple",text:"Console scripts",count:2},{colorclass:"orange",text:"Other (Data commands, Groups)",count:4}],pip_install_cmd:"pip install aiida-crystal17",is_installable:"True"},"aiida-cusp":{code_home:"https://github.com/aiida-cusp/aiida-cusp",documentation_url:"https://aiida-cusp.readthedocs.io",entry_point_prefix:"cusp",pip_url:"https://pypi.org/project/aiida-cusp",name:"aiida-cusp",package_name:"aiida_cusp",hosted_on:"github.com",metadata:{author:"Andreas Stamminger",author_email:"stammingera@gmail.com",version:"0.1.0b2",description:"Custodian based VASP Plugin for AiiDA",classifiers:["Development Status :: 4 - Beta","License :: OSI Approved :: MIT License","Programming Language :: Python :: 3.6","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Topic :: Scientific/Engineering :: Physics","Topic :: Scientific/Engineering :: Chemistry","Environment :: Plugins","Framework :: AiiDA"]},aiida_version:">=1.3.0,<2.0.0",entry_points:{"aiida.data":{"cusp.kpoints":"aiida_cusp.data.inputs.vasp_kpoint:VaspKpointData","cusp.poscar":"aiida_cusp.data.inputs.vasp_poscar:VaspPoscarData","cusp.incar":"aiida_cusp.data.inputs.vasp_incar:VaspIncarData","cusp.potcar":"aiida_cusp.data.inputs.vasp_potcar:VaspPotcarData","cusp.vasprun":"aiida_cusp.data.outputs.vasp_vasprun:VaspVasprunData","cusp.outcar":"aiida_cusp.data.outputs.vasp_outcar:VaspOutcarData","cusp.contcar":"aiida_cusp.data.outputs.vasp_contcar:VaspContcarData","cusp.chgcar":"aiida_cusp.data.outputs.vasp_chgcar:VaspChgcarData","cusp.wavecar":"aiida_cusp.data.outputs.vasp_wavecar:VaspWavecarData","cusp.generic":"aiida_cusp.data.outputs.vasp_generic:VaspGenericData","cusp.potcarfile":"aiida_cusp.data.inputs.vasp_potcar:VaspPotcarFile"},"aiida.calculations":{"cusp.vasp":"aiida_cusp.calculators.vasp_calculation:VaspCalculation"},"aiida.parsers":{"cusp.default":"aiida_cusp.parsers.vasp_file_parser:VaspFileParser"},"aiida.cmdline.data":{potcar:"aiida_cusp.cli.potcar_cmd:potcar"}},commits_count:68,development_status:"beta",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"red",text:"Data",count:11},{colorclass:"orange",text:"Other (Data commands)",count:1}],pip_install_cmd:"pip install 
https://pypi.org/project/aiida-cusp",is_installable:"False"},"aiida-dataframe":{entry_point_prefix:"dataframe",plugin_info:"https://raw.github.com/janssenhenning/aiida-dataframe/main/pyproject.toml",code_home:"https://github.com/janssenhenning/aiida-dataframe",version_file:"https://raw.githubusercontent.com/janssenhenning/aiida-dataframe/main/aiida_dataframe/__init__.py",pip_url:"aiida-dataframe",documentation_url:"https://aiida-dataframe.readthedocs.io/en/latest/",name:"aiida-dataframe",package_name:"aiida_dataframe",hosted_on:"github.com",metadata:{description:"AiiDA data plugin for pandas DataFrame objects",author_email:"Henning Janßen ",classifiers:["Development Status :: 4 - Beta","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Programming Language :: Python","Programming Language :: Python :: 3","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9"],version:"0.1.3"},aiida_version:">=1.0,<3",entry_points:{"aiida.cmdline.data":{dataframe:"aiida_dataframe.cli:data_cli"},"aiida.data":{"dataframe.frame":"aiida_dataframe.data.dataframe:PandasFrameData"}},commits_count:13,development_status:"beta",summaryinfo:[{colorclass:"red",text:"Data",count:1},{colorclass:"orange",text:"Other (Data commands)",count:1}],pip_install_cmd:"pip install aiida-dataframe",is_installable:"True"},"aiida-ddec":{code_home:"https://github.com/lsmo-epfl/aiida-ddec",entry_point_prefix:"ddec",pip_url:"git+https://github.com/yakutovicha/aiida-ddec",name:"aiida-ddec",package_name:"aiida_ddec",hosted_on:"github.com",metadata:{author:"Aliaksandr Yakutovich",author_email:"aliaksandr.yakutovich@epfl.ch",version:"1.1.0",description:"AiiDA plugin for DDEC code",classifiers:["License :: OSI Approved :: MIT License","Programming Language :: Python :: 2.7","Programming Language :: Python :: 3.6","Programming Language :: Python :: 3.7","Framework :: AiiDA","Development Status :: 5 - Production/Stable"]},aiida_version:">=1.1.0,<3",entry_points:{"aiida.calculations":{ddec:{description:["AiiDA plugin for the ddec code that performs density derived"," electrostatic and chemical atomic population analysis."],spec:{inputs:[{name:"parameters",required:!0,valid_types:"Dict",info:"Input parameters such as net charge, protocol, atomic densities path, ..."},{name:"charge_density_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Use a remote folder (for restarts and similar)"},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. 
The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"structure_ddec",required:!0,valid_types:"CifData",info:"structure with DDEC charges"},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The retrieved folder data node could not be accessed."},{status:101,message:"The retrieved folder does not contain an output file."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"}]},class:"aiida_ddec.calculations:DdecCalculation"}},"aiida.parsers":{ddec:"aiida_ddec.parsers:DdecParser"},"aiida.workflows":{"ddec.cp2k_ddec":"aiida_ddec.workchains:Cp2kDdecWorkChain"}},commits_count:9,development_status:"stable",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install git+https://github.com/yakutovicha/aiida-ddec",is_installable:"True"},"aiida-defects":{code_home:"https://github.com/epfl-theos/aiida-defects",entry_point_prefix:"defects",pip_url:"aiida-defects",plugin_info:"https://raw.githubusercontent.com/epfl-theos/aiida-defects/master/pyproject.toml",name:"aiida-defects",package_name:"aiida_defects",hosted_on:"github.com",metadata:{description:"AiiDA-Defects is a plugin for the AiiDA computational materials science framework, and provides tools and automated workflows for the study of defects in materials.",author:"The AiiDA-Defects developers",classifiers:["Development Status :: 4 - Beta","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Programming Language :: 
Python"],version:"1.0.1"},aiida_version:">=2.0,<3",entry_points:{"aiida.data":{"defects.array.stability":"aiida_defects.data.data:StabilityData"},"aiida.workflows":{"defects.formation_energy.chemical_potential":"aiida_defects.formation_energy.chemical_potential.chemical_potential:ChemicalPotentialWorkchain","defects.formation_energy.corrections.gaussian_countercharge":"aiida_defects.formation_energy.corrections.gaussian_countercharge.gaussian_countercharge:GaussianCounterChargeWorkchain","defects.formation_energy.corrections.gaussian_countercharge.model_potential":"aiida_defects.formation_energy.corrections.gaussian_countercharge.model_potential.model_potential:ModelPotentialWorkchain","defects.formation_energy.corrections.point_countercharge":"aiida_defects.formation_energy.corrections.point_countercharge.point_countercharge:PointCounterChargeWorkchain","defects.formation_energy.potential_alignment":"aiida_defects.formation_energy.potential_alignment.potential_alignment:PotentialAlignmentWorkchain","defects.formation_energy.qe":"aiida_defects.formation_energy.formation_energy_qe:FormationEnergyWorkchainQE","defects.formation_energy.siesta":"aiida_defects.formation_energy.formation_energy_siesta:FormatonEnergyWorkchainSiesta"}},commits_count:10,development_status:"beta",summaryinfo:[{colorclass:"red",text:"Data",count:1},{colorclass:"green",text:"Workflows",count:7}],pip_install_cmd:"pip install aiida-defects",is_installable:"True"},"aiida-diff":{code_home:"https://github.com/aiidateam/aiida-diff",development_status:"stable",documentation_url:"https://aiida-diff.readthedocs.io/",entry_point_prefix:"diff",pip_url:"git+https://github.com/aiidateam/aiida-diff#egg=aiida-diff-0.1.0a0",name:"aiida-diff",package_name:"aiida_diff",hosted_on:"github.com",metadata:{description:"AiiDA demo plugin that wraps the `diff` executable for computing the difference between two files.",classifiers:["Programming Language :: Python","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Development Status :: 3 - Alpha","Framework :: AiiDA"],author:"The AiiDA Team"},aiida_version:">=2.0,<3",entry_points:{"aiida.data":{diff:"aiida_diff.data:DiffParameters"},"aiida.calculations":{diff:"aiida_diff.calculations:DiffCalculation"},"aiida.parsers":{diff:"aiida_diff.parsers:DiffParser"},"aiida.cmdline.data":{diff:"aiida_diff.cli:data_cli"}},commits_count:0,summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"red",text:"Data",count:1},{colorclass:"orange",text:"Other (Data commands)",count:1}],pip_install_cmd:"pip install git+https://github.com/aiidateam/aiida-diff#egg=aiida-diff-0.1.0a0",is_installable:"False"},"aiida-donothing":{code_home:"https://github.com/atztogo/aiida-donothing",entry_point_prefix:"donothing",name:"aiida-donothing",package_name:"aiida_donothing",hosted_on:"github.com",metadata:{author:"Atsushi Togo",author_email:"atz.togo@gmail.com",version:"0.1",description:"AiiDA calculation plugin for doing nothing",classifiers:["Programming Language :: Python","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Framework :: 
AiiDA"]},aiida_version:">=1.6.5,<2.0.0",entry_points:{"aiida.calculations":{"donothing.donothing":"aiida_donothing.calculations.donothing:DoNothingCalculation"},"aiida.parsers":{"donothing.donothing":"aiida_donothing.parsers.donothing:DoNothingParser"}},commits_count:1,development_status:"planning",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1}],pip_install_cmd:"See source code repository."},"aiida-dynamic-workflows":{code_home:"https://github.com/microsoft/aiida-dynamic-workflows",entry_point_prefix:"dynamic_workflows",name:"aiida-dynamic-workflows",package_name:"aiida_dynamic_workflows",hosted_on:"github.com",metadata:{},aiida_version:null,entry_points:{},commits_count:0,development_status:"planning",summaryinfo:[],pip_install_cmd:"See source code repository."},"aiida-environ":{code_home:"https://github.com/environ-developers/aiida-environ",entry_point_prefix:"environ",pip_url:"git+https://github.com/environ-developers/aiida-environ",name:"aiida-environ",package_name:"aiida_environ",hosted_on:"github.com",metadata:{},aiida_version:null,entry_points:{},commits_count:0,development_status:"planning",summaryinfo:[],pip_install_cmd:"pip install git+https://github.com/environ-developers/aiida-environ"},"aiida-eon":{code_home:"https://github.com/HaoZeke/aiida-eon",entry_point_prefix:"eon",name:"aiida-eon",package_name:"aiida_eon",hosted_on:"github.com",metadata:{},aiida_version:null,entry_points:{},commits_count:0,development_status:"planning",summaryinfo:[],pip_install_cmd:"See source code repository."},"aiida-eonclient":{code_home:"https://github.com/HaoZeke/aiida-eonclient",entry_point_prefix:"eonclient",name:"aiida-eonclient",package_name:"aiida_eonclient",hosted_on:"github.com",metadata:{},aiida_version:null,entry_points:{},commits_count:0,development_status:"planning",summaryinfo:[],pip_install_cmd:"See source code repository."},"aiida-fenics":{code_home:"https://github.com/sphuber/aiida-fenics/tree/master",entry_point_prefix:"fenics",pip_url:"git+https://github.com/sphuber/aiida-fenics",name:"aiida-fenics",package_name:"aiida_fenics",hosted_on:"github.com",metadata:{},aiida_version:null,entry_points:{},commits_count:-1,development_status:"planning",summaryinfo:[],pip_install_cmd:"pip install git+https://github.com/sphuber/aiida-fenics"},"aiida-firecrest":{code_home:"https://github.com/aiidateam/aiida-firecrest",entry_point_prefix:"firecrest",pip_url:"aiida-firecrest",plugin_info:"https://raw.githubusercontent.com/aiidateam/aiida-firecrest/main/pyproject.toml",name:"aiida-firecrest",package_name:"aiida_firecrest",hosted_on:"github.com",metadata:{description:"AiiDA Transport/Scheduler plugins for interfacing with FirecREST.",author_email:"Chris Sewell ",classifiers:["Development Status :: 3 - Alpha","Framework :: AiiDA","License :: OSI Approved :: MIT License","Operating System :: MacOS :: MacOS X","Operating System :: POSIX :: Linux","Programming Language :: Python :: 3","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Programming Language :: Python :: Implementation :: CPython","Topic :: Software Development :: Libraries :: Python 
Modules"],version:"0.1.0a1"},aiida_version:"<2",entry_points:{"aiida.schedulers":{firecrest:"aiida_firecrest.scheduler:FirecrestScheduler"},"aiida.transports":{firecrest:"aiida_firecrest.transport:FirecrestTransport"},console_scripts:{"aiida-firecrest-cli":"aiida_firecrest.cli:main"}},commits_count:19,development_status:"alpha",summaryinfo:[{colorclass:"purple",text:"Console scripts",count:1},{colorclass:"orange",text:"Other (Schedulers, Transports)",count:2}],pip_install_cmd:"pip install --pre aiida-firecrest",is_installable:"True"},"aiida-fireworks-scheduler":{code_home:"https://github.com/zhubonan/aiida-fireworks-scheduler",development_status:"beta",documentation_url:"https://aiida-fireworks-scheduler.readthedocs.io",entry_point_prefix:"fireworks_scheduler",pip_url:"git+https://github.com/zhubonan/aiida-fireworks-scheduler",name:"aiida-fireworks-scheduler",package_name:"aiida_fireworks_scheduler",hosted_on:"github.com",metadata:{author:"Bonan Zhu",author_email:"zhubonan@outlook.com",version:"1.2.0",description:"AiiDA plugin to allow using `fireworks` as the execution engine for `CalcJob`.",classifiers:["Programming Language :: Python","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Framework :: AiiDA"]},aiida_version:null,entry_points:{"aiida.schedulers":{fireworks:"aiida_fireworks_scheduler.fwscheduler:FwScheduler","fireworks_scheduler.default":"aiida_fireworks_scheduler.fwscheduler:FwScheduler","fireworks_scheduler.keepenv":"aiida_fireworks_scheduler.fwscheduler:FwSchedulerKeepEnv"},"aiida.cmdline.data":{"fireworks-scheduler":"aiida_fireworks_scheduler.cmdline:fw_cli"},console_scripts:{arlaunch:"aiida_fireworks_scheduler.scripts.arlaunch_run:arlaunch"}},commits_count:0,summaryinfo:[{colorclass:"purple",text:"Console scripts",count:1},{colorclass:"orange",text:"Other (Data commands, Schedulers)",count:4}],pip_install_cmd:"pip install git+https://github.com/zhubonan/aiida-fireworks-scheduler",is_installable:"True"},"aiida-fleur":{code_home:"https://github.com/JuDFTteam/aiida-fleur/tree/develop",development_status:"stable",documentation_url:"https://aiida-fleur.readthedocs.io/",entry_point_prefix:"fleur",pip_url:"aiida-fleur",plugin_info:"https://raw.github.com/JuDFTteam/aiida-fleur/develop/setup.json",name:"aiida-fleur",package_name:"aiida_fleur",hosted_on:"github.com",metadata:{description:"AiiDA Plugin for running the FLEUR code and its input generator. Also includes high-level workchains and utilities",author_email:"The JuDFT team ",classifiers:["Environment :: Plugins","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Operating System :: MacOS :: MacOS X","Operating System :: POSIX :: Linux","Programming Language :: Python :: 3","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering :: Physics"],version:"2.0.0"},aiida_version:">=2.0.1,<3.0.0",entry_points:{"aiida.calculations":{"fleur.fleur":{description:["A CalcJob class that represents FLEUR DFT calculation."," For more information about the FLEUR-code family go to http://www.flapw.de/"],spec:{inputs:[{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. 
This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"fleurinp",required:!1,valid_types:"FleurinpData, NoneType",info:"Use a FleurinpData node that specifies the input parametersusually copy from the parent calculation, basically makesthe inp.xml file visible in the db and makes sure it has the files needed."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parent_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Use a remote or local repository folder as parent folder (also for restarts and similar). It should contain all the needed files for a Fleur calc, only edited files should be uploaded from the repository."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"This parameter data node is used to specify for some advanced features how the plugin behaves. You can add filesthe retrieve list, or add command line switches, for all available features here check the documentation."}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"error_params",required:!1,valid_types:"Dict",info:""},{name:"output_parameters",required:!1,valid_types:"Dict",info:""},{name:"output_params_complex",required:!1,valid_types:"Dict",info:""},{name:"relax_parameters",required:!1,valid_types:"Dict",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"No retrieved folder found."},{status:301,message:"One of the output files can not be opened."},{status:302,message:"FLEUR calculation failed for unknown reason."},{status:303,message:"XML output file was not found."},{status:304,message:"Parsing of XML output file failed."},{status:305,message:"Parsing of relax XML output file failed."},{status:310,message:"FLEUR calculation failed due to lack of memory."},{status:311,message:"FLEUR calculation failed because an atom spilled to thevacuum during relaxation"},{status:312,message:"FLEUR calculation failed due to MT overlap."},{status:313,message:"Overlapping MT-spheres during relaxation."},{status:314,message:"Problem with cdn is suspected. Consider removing cdn"},{status:315,message:"The LDA+U density matrix contains invalid elements."},{status:316,message:"Calculation failed due to time limits."},{status:318,message:"Calculation failed due to missing dependency ({name}) for given calculation."}]},class:"aiida_fleur.calculation.fleur:FleurCalculation"},"fleur.inpgen":{description:["JobCalculationClass for the inpgen, which is a preprocessor for a FLEUR calculation."," For more information about produced files and the FLEUR-code family, go to http://www.flapw.de/."],spec:{inputs:[{name:"structure",required:!0,valid_types:"StructureData",info:"Choose the input structure to use"},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Use a node that specifies the input parameters for the namelists"},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"This parameter data node is used to specify for some advanced features how the plugin behaves. You can add filesthe retrieve list, or add command line switches, for all available features here check the documentation."}],outputs:[{name:"fleurinp",required:!0,valid_types:"FleurinpData",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"No retrieved folder found."},{status:301,message:"One of the output files can not be opened."},{status:306,message:"XML input file was not found."},{status:307,message:"Some required files were not retrieved."},{status:308,message:"During parsing: FleurinpData could not be initialized, see log. 
"},{status:309,message:"During parsing: FleurinpData failed validation."},{status:310,message:"The profile {profile} is not known to the used inpgen code"}]},class:"aiida_fleur.calculation.fleurinputgen:FleurinputgenCalculation"}},"aiida.data":{"fleur.fleurinp":"aiida_fleur.data.fleurinp:FleurinpData"},"aiida.parsers":{"fleur.fleurinpgenparser":"aiida_fleur.parsers.fleur_inputgen:Fleur_inputgenParser","fleur.fleurparser":"aiida_fleur.parsers.fleur:FleurParser"},"aiida.workflows":{"fleur.banddos":{description:["This workflow calculated a bandstructure from a Fleur calculation",""," :Params: a Fleurcalculation node"," :returns: Success, last result node, list with convergence behavior"],spec:{inputs:[{name:"fleur",required:!0,valid_types:"Code",info:""},{name:"fleurinp",required:!1,valid_types:"FleurinpData, NoneType",info:""},{name:"kpoints",required:!1,valid_types:"KpointsData, NoneType",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"options",required:!1,valid_types:"Dict, NoneType",info:""},{name:"remote",required:!1,valid_types:"RemoteData, NoneType",info:""},{name:"scf",required:!1,valid_types:"Data",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"banddos_calc",required:!0,valid_types:"",info:""},{name:"output_banddos_wc_para",required:!0,valid_types:"Dict",info:""},{name:"output_banddos_wc_bands",required:!1,valid_types:"BandsData",info:""},{name:"output_banddos_wc_dos",required:!1,valid_types:"XyData",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:230,message:"Invalid workchain parameters."},{status:231,message:"Invalid input configuration."},{status:233,message:"Invalid code node specified, check inpgen and fleur code nodes."},{status:235,message:"Input file modification failed."},{status:236,message:"Input file was corrupted after user's modifications."},{status:334,message:"SCF calculation failed."},{status:335,message:"Found no SCF calculation remote repository."}]},class:"aiida_fleur.workflows.banddos:FleurBandDosWorkChain"},"fleur.base":{description:["Workchain to run a FLEUR calculation with automated error handling and restarts"],spec:{inputs:[{name:"options",required:!0,valid_types:"Dict",info:"Optional parameters to set up computational details."},{name:"add_comp_para",required:!1,valid_types:"Dict",info:"Gives additional control over computational parametersonly_even_MPI: set to true if you want to suppress odd number of MPI processes in parallelisation.This might speedup a calculation for machines having even number of sockets per node.max_queue_nodes: maximal number of nodes allowed on the remote machine. Used only to automatically solve some FLEUR failures.max_queue_wallclock_sec: maximal wallclock time allowed on the remote machine. Used only to automatically solve some FLEUR failures."},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. 
This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"description",required:!1,valid_types:"str, NoneType",info:"Calculation description."},{name:"fleurinp",required:!1,valid_types:"FleurinpData, NoneType",info:"Use a FleurinpData node that specifies the input parametersusually copy from the parent calculation, basically makesthe inp.xml file visible in the db and makes sure it has the files needed."},{name:"handler_overrides",required:!1,valid_types:"Dict, NoneType",info:"Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration."},{name:"label",required:!1,valid_types:"str, NoneType",info:"Calculation label."},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parent_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Use a remote or local repository folder as parent folder (also for restarts and similar). It should contain all the needed files for a Fleur calc, only edited files should be uploaded from the repository."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"This parameter data node is used to specify for some advanced features how the plugin behaves. You can add filesthe retrieve list, or add command line switches, for all available features here check the documentation."}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"error_params",required:!1,valid_types:"Dict",info:""},{name:"output_parameters",required:!1,valid_types:"Dict",info:""},{name:"output_params_complex",required:!1,valid_types:"Dict",info:""},{name:"relax_parameters",required:!1,valid_types:"Dict",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:311,message:"FLEUR calculation failed because an atom spilled to thevacuum during relaxation"},{status:313,message:"Overlapping MT-spheres during relaxation."},{status:388,message:"Computational resources are not optimal."},{status:389,message:"Computational resources are not optimal."},{status:390,message:"Computational resources are not optimal."},{status:399,message:"FleurCalculation failed and FleurBaseWorkChain has no strategy to resolve this"},{status:401,message:"The maximum number of iterations was exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."}]},class:"aiida_fleur.workflows.base_fleur:FleurBaseWorkChain"},"fleur.base_relax":{description:["Workchain to run Relax WorkChain with automated error handling and restarts"],spec:{inputs:[{name:"scf",required:!0,valid_types:"Data",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"description",required:!1,valid_types:"str, NoneType",info:"Calculation description."},{name:"final_scf",required:!1,valid_types:"Data",info:""},{name:"handler_overrides",required:!1,valid_types:"Dict, NoneType",info:"Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration."},{name:"label",required:!1,valid_types:"str, NoneType",info:"Calculation label."},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"last_scf",required:!0,valid_types:"",info:""},{name:"optimized_structure",required:!0,valid_types:"StructureData",info:""},{name:"output_relax_wc_para",required:!0,valid_types:"Dict",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:399,message:"FleurRelaxWorkChain failed and FleurBaseRelaxWorkChain has no strategy to resolve this"},{status:401,message:"The maximum number of iterations was 
exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."}]},class:"aiida_fleur.workflows.base_relax:FleurBaseRelaxWorkChain"},"fleur.cfcoeff":{description:["Workflow for calculating rare-earth crystal field coefficients"],spec:{inputs:[{name:"metadata",required:!1,valid_types:"",info:""},{name:"orbcontrol",required:!1,valid_types:"Data",info:""},{name:"scf",required:!1,valid_types:"Data",info:""},{name:"scf_rare_earth_analogue",required:!1,valid_types:"Data",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"output_cfcoeff_wc_para",required:!0,valid_types:"Dict",info:""},{name:"output_cfcoeff_wc_charge_densities",required:!1,valid_types:"XyData",info:""},{name:"output_cfcoeff_wc_potentials",required:!1,valid_types:"XyData",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:230,message:"Invalid workchain parameters."},{status:231,message:"Invalid input configuration."},{status:235,message:"Input file modification failed."},{status:236,message:"Input file was corrupted after user's modifications."},{status:345,message:"Convergence scf workflow failed."},{status:451,message:"Convergence orbcontrol workflow failed."},{status:452,message:"CF calculation failed."}]},class:"aiida_fleur.workflows.cfcoeff:FleurCFCoeffWorkChain"},"fleur.corehole":{description:["Turn key solution for a corehole calculation with the FLEUR code."," Has different protocols for different core-hole types (valence, charge).",""," Calculates supercells. Extracts binding energies"," for certain corelevels from the total energy differences a the calculation with"," corehole and without.",""," Documentation:"," See help for details.",""," Two paths are possible:",""," (1) Start from a structure -> workchains run inpgen first (recommended)"," (2) Start from a Fleurinp data object",""," Also it is recommended to provide a calc parameter node for the structure",""," :param wf_parameters: Dict node, specify, resources and what should be calculated"," :param structure: structureData node, crystal structure"," :param calc_parameters: Dict node, inpgen parameters for the crystal structure"," :param fleurinp: fleurinpData node,"," :param inpgen: Code node,"," :param fleur: Code node,",""," :return: output_corehole_wc_para Dict node, successful=True if no error",""," :uses workchains: fleur_scf_wc, fleur_relax_wc"," :uses calcfunctions: supercell, create_corehole_result_node, prepare_struc_corehole_wf"],spec:{inputs:[{name:"fleur",required:!0,valid_types:"Code",info:""},{name:"inpgen",required:!0,valid_types:"Code",info:""},{name:"calc_parameters",required:!1,valid_types:"Dict, NoneType",info:""},{name:"fleurinp",required:!1,valid_types:"FleurinpData, NoneType",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"options",required:!1,valid_types:"Dict, NoneType",info:""},{name:"structure",required:!1,valid_types:"StructureData, NoneType",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"output_corehole_wc_para",required:!0,valid_types:"Dict",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:1,message:"The input resources are invalid."},{status:2,message:"The process failed with legacy failure 
mode."},{status:2,message:"Input resources are missing."},{status:3,message:"The code provided is invalid, or not of the right kind."},{status:4,message:"Inpgen calculation FAILED, check output"},{status:5,message:"Changing of the FLEURINP data went wrong, check log."},{status:6,message:"The FLEUR input file for the calculation did not validate."},{status:7,message:"At least one FLEUR calculation FAILED, check the output and log."},{status:8,message:"At least one FLEUR calculation did not/could not reach thedesired convergece Criteria, with the current parameters."},{status:9,message:"Something went wrong in the determiation what coreholes to calculate, probably the input format was not correct. Check log."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_fleur.workflows.corehole:FleurCoreholeWorkChain"},"fleur.create_magnetic":{description:["This workflow creates relaxed magnetic film on a substrate."],spec:{inputs:[{name:"distance_suggestion",required:!1,valid_types:"Dict, NoneType",info:""},{name:"eos",required:!1,valid_types:"Data",info:""},{name:"eos_output",required:!1,valid_types:"Dict, NoneType",info:""},{name:"interlayer_dist",required:!1,valid_types:"Dict, NoneType",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"optimized_structure",required:!1,valid_types:"StructureData, NoneType",info:""},{name:"relax",required:!1,valid_types:"Data",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"magnetic_structure",required:!0,valid_types:"StructureData",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:230,message:"Invalid workchain parameters."},{status:231,message:"Invalid input configuration."},{status:380,message:"Specified substrate has to be bcc or fcc."},{status:382,message:"Relaxation calculation failed."},{status:383,message:"EOS WorkChain failed."}]},class:"aiida_fleur.workflows.create_magnetic_film:FleurCreateMagneticWorkChain"},"fleur.dmi":{description:["This workflow calculates DMI energy dispersion of a structure."],spec:{inputs:[{name:"fleur",required:!0,valid_types:"Code",info:""},{name:"fleurinp",required:!1,valid_types:"FleurinpData, NoneType",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"options",required:!1,valid_types:"Dict, NoneType",info:""},{name:"remote",required:!1,valid_types:"RemoteData, NoneType",info:""},{name:"scf",required:!1,valid_types:"Data",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"output_dmi_wc_para",required:!0,valid_types:"Dict",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:230,message:"Invalid workchain parameters."},{status:231,message:"Invalid input configuration."},{status:233,message:"Invalid code node specified, check inpgen and fleur code nodes."},{status:235,message:"Input file modification failed."},{status:236,message:"Input file was corrupted after user's modifications."},{status:334,message:"Reference 
calculation failed."},{status:335,message:"Found no reference calculation remote repository."},{status:336,message:"Force theorem calculation failed."}]},class:"aiida_fleur.workflows.dmi:FleurDMIWorkChain"},"fleur.dos":{description:["DEPRECATED: Use FleurBandDosWorkChain instead (entrypoint fleur.banddos)"," This workflow calculated a DOS from a Fleur calculation",""," :Params: a Fleurcalculation node"," :returns: Success, last result node, list with convergence behavior",""," wf_parameters: { 'tria', 'nkpts', 'sigma', 'emin', 'emax'}"," defaults : tria = True, nkpts = 800, sigma=0.005, emin= -0.3, emax = 0.8"],spec:{inputs:[{name:"fleur",required:!0,valid_types:"Code",info:""},{name:"calc_parameters",required:!1,valid_types:"Dict, NoneType",info:""},{name:"fleurinp",required:!1,valid_types:"FleurinpData, NoneType",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"options",required:!1,valid_types:"Dict, NoneType",info:""},{name:"remote_data",required:!1,valid_types:"RemoteData, NoneType",info:""},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_fleur.workflows.dos:fleur_dos_wc"},"fleur.eos":{description:["This workflow calculates the equation of states of a structure."," Calculates several unit cells with different volumes."," A Birch_Murnaghan equation of states fit determines the Bulk modulus and the"," groundstate volume of the cell.",""," :params wf_parameters: Dict node, optional 'wf_parameters', protocol specifying parameter dict"," :params structure: StructureData node, 'structure' crystal structure"," :params calc_parameters: Dict node, optional 'calc_parameters' parameters for inpgen"," :params inpgen: Code node,"," :params fleur: Code node,","",""," :return output_eos_wc_para: Dict node, contains relevant output information."," about general succeed, fit results and so on."],spec:{inputs:[{name:"scf",required:!0,valid_types:"Data",info:""},{name:"structure",required:!0,valid_types:"StructureData",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"output_eos_wc_para",required:!0,valid_types:"Dict",info:""},{name:"output_eos_wc_structure",required:!0,valid_types:"StructureData",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:230,message:"Invalid workchain parameters."},{status:400,message:"At least one of the SCF sub processes did not finish successfully."}]},class:"aiida_fleur.workflows.eos:FleurEosWorkChain"},"fleur.init_cls":{description:["Turn key solution for the calculation of core level shift"],spec:{inputs:[{name:"fleur",required:!0,valid_types:"Code",info:""},{name:"calc_parameters",required:!1,valid_types:"Dict, NoneType",info:""},{name:"fleurinp",required:!1,valid_types:"FleurinpData, NoneType",info:""},{name:"inpgen",required:!1,valid_types:"Code, 
NoneType",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"options",required:!1,valid_types:"Dict, NoneType",info:""},{name:"structure",required:!1,valid_types:"StructureData, NoneType",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"output_initial_cls_wc_para",required:!0,valid_types:"Dict",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_fleur.workflows.initial_cls:FleurInitialCLSWorkChain"},"fleur.mae":{description:["This workflow calculates the Magnetic Anisotropy Energy of a structure."],spec:{inputs:[{name:"fleur",required:!0,valid_types:"Code",info:""},{name:"fleurinp",required:!1,valid_types:"FleurinpData, NoneType",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"options",required:!1,valid_types:"Dict, NoneType",info:""},{name:"remote",required:!1,valid_types:"RemoteData, NoneType",info:""},{name:"scf",required:!1,valid_types:"Data",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"output_mae_wc_para",required:!0,valid_types:"Dict",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:230,message:"Invalid workchain parameters."},{status:231,message:"Invalid input configuration."},{status:233,message:"Invalid code node specified, check inpgen and fleur code nodes."},{status:235,message:"Input file modification failed."},{status:236,message:"Input file was corrupted after user's modifications."},{status:334,message:"Reference calculation failed."},{status:335,message:"Found no reference calculation remote repository."},{status:336,message:"Force theorem calculation failed."}]},class:"aiida_fleur.workflows.mae:FleurMaeWorkChain"},"fleur.mae_conv":{description:["This workflow calculates the Magnetic Anisotropy Energy of a structure."],spec:{inputs:[{name:"scf",required:!0,valid_types:"Data",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"output_mae_conv_wc_para",required:!0,valid_types:"Dict",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:230,message:"Invalid workchain parameters."},{status:343,message:"Convergence MAE calculation failed for all SQAs."},{status:344,message:"Convergence MAE calculation failed for some SQAs."}]},class:"aiida_fleur.workflows.mae_conv:FleurMaeConvWorkChain"},"fleur.orbcontrol":{description:["Workchain for determining the groundstate density matrix in an DFT+U"," calculation. This is done in 2 or 3 steps:",""," 1. Converge the system without DFT+U (a converged calculation can be"," provided to skip this step)"," 2. 
A fixed number of iterations is run with fixed density matrices"," either generated as all distinct permutations for the given occupations"," or the explicitly given configurations"," 3. The system and density matrix is relaxed",""," :param wf_parameters: (Dict), Workchain Specifications"," :param scf_no_ldau: (Dict), Inputs to a FleurScfWorkChain providing the initial system"," either converged or staring from a structure"," :param scf_with_ldau: (Dict), Inputs to a FleurScfWorkChain. Only the wf_parameters are valid"," :param fleurinp: (FleurinpData) FleurinpData to start from if no SCF should be done"," :param remote: (RemoteData) RemoteData to start from if no SCF should be done"," :param structure: (StructureData) Structure to start from if no SCF should be done"," :param calc_parameters: (Dict), Inpgen Parameters"," :param settings: (Dict), additional settings for e.g retrieving files"," :param options: (Dict), Options for the submission of the jobs"," :param inpgen: (Code)"," :param fleur: (Code)"],spec:{inputs:[{name:"fleur",required:!0,valid_types:"Code",info:""},{name:"calc_parameters",required:!1,valid_types:"Dict, NoneType",info:""},{name:"fixed_remotes",required:!1,valid_types:"RemoteData",info:""},{name:"fleurinp",required:!1,valid_types:"FleurinpData, NoneType",info:""},{name:"inpgen",required:!1,valid_types:"Code, NoneType",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"options",required:!1,valid_types:"Dict, NoneType",info:""},{name:"options_inpgen",required:!1,valid_types:"Dict, NoneType",info:""},{name:"relaxed_remotes",required:!1,valid_types:"RemoteData",info:""},{name:"remote",required:!1,valid_types:"RemoteData, NoneType",info:""},{name:"scf_no_ldau",required:!1,valid_types:"Data",info:"Inputs for SCF Workchain before adding LDA+U"},{name:"scf_with_ldau",required:!1,valid_types:"Data",info:"Inputs for SCF Workchain after the LDA+U matrix was fixed"},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:""},{name:"settings_inpgen",required:!1,valid_types:"Dict, NoneType",info:""},{name:"structure",required:!1,valid_types:"StructureData, NoneType",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"groundstate_scf",required:!0,valid_types:"",info:""},{name:"output_orbcontrol_wc_para",required:!0,valid_types:"Dict",info:""},{name:"groundstate_denmat",required:!1,valid_types:"SinglefileData",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:230,message:"Invalid workchain parameters."},{status:231,message:"Invalid input configuration."},{status:233,message:"Input codes do not correspond to fleur or inpgen respectively."},{status:235,message:"Input file modification failed."},{status:236,message:"Input file was corrupted after user's modifications."},{status:342,message:"Convergence LDA+U calculation failed for some Initial configurations."},{status:343,message:"Convergence LDA+U calculation failed for all Initial configurations."},{status:360,message:"Inpgen calculation failed."},{status:450,message:"Convergence workflow without LDA+U failed."}]},class:"aiida_fleur.workflows.orbcontrol:FleurOrbControlWorkChain"},"fleur.relax":{description:["This workflow performs structure 
optimization."],spec:{inputs:[{name:"scf",required:!0,valid_types:"Data",info:""},{name:"final_scf",required:!1,valid_types:"Data",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"last_scf",required:!0,valid_types:"",info:""},{name:"optimized_structure",required:!0,valid_types:"StructureData",info:""},{name:"output_relax_wc_para",required:!0,valid_types:"Dict",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:230,message:"Invalid workchain parameters."},{status:231,message:"If you want to run a final scf inpgen has to be there."},{status:311,message:"FLEUR calculation failed because an atom spilled to thevacuum during relaxation"},{status:313,message:"Overlapping MT-spheres during relaxation."},{status:350,message:"Optimization cycle did not lead to convergence of forces."},{status:351,message:"SCF Workchains failed for some reason."},{status:352,message:"Found no relaxed structure info in the output of SCF"},{status:353,message:"Found no SCF output"},{status:354,message:"Force is small, switch to BFGS"}]},class:"aiida_fleur.workflows.relax:FleurRelaxWorkChain"},"fleur.relax_torque":{description:["This workflow performs spin structure optimization."],spec:{inputs:[{name:"scf",required:!0,valid_types:"Data",info:""},{name:"final_scf",required:!1,valid_types:"Data",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"output_relax_torque_wc_para",required:!0,valid_types:"Dict",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:230,message:"Invalid workchain parameters."},{status:231,message:"If you want to run a final scf inpgen has to be there."},{status:350,message:"Optimization cycle did not lead to convergence."},{status:351,message:"An SCF Workchain failed for some reason."}]},class:"aiida_fleur.workflows.relax_torque:FleurRelaxTorqueWorkChain"},"fleur.scf":{description:["Workchain for converging a FLEUR calculation (SCF).",""," It converges the charge density, total energy or the largest force."," Two paths are possible:",""," (1) Start from a structure and run the inpgen first optional with calc_parameters"," (2) Start from a Fleur calculation, with optional remoteData",""," :param wf_parameters: (Dict), Workchain Specifications"," :param structure: (StructureData), Crystal structure"," :param calc_parameters: (Dict), Inpgen Parameters"," :param fleurinp: (FleurinpData), to start with a Fleur calculation"," :param remote_data: (RemoteData), from a Fleur calculation"," :param inpgen: (Code)"," :param fleur: (Code)",""," :return: output_scf_wc_para (Dict), Information of workflow results"," like Success, last result node, list with convergence behavior"],spec:{inputs:[{name:"fleur",required:!0,valid_types:"Code",info:""},{name:"calc_parameters",required:!1,valid_types:"Dict, NoneType",info:""},{name:"fleurinp",required:!1,valid_types:"FleurinpData, 
NoneType",info:""},{name:"inpgen",required:!1,valid_types:"Code, NoneType",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"options",required:!1,valid_types:"Dict, NoneType",info:""},{name:"remote_data",required:!1,valid_types:"RemoteData, NoneType",info:""},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:""},{name:"settings_inpgen",required:!1,valid_types:"Dict, NoneType",info:""},{name:"structure",required:!1,valid_types:"StructureData, NoneType",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"fleurinp",required:!0,valid_types:"FleurinpData",info:""},{name:"last_calc",required:!0,valid_types:"",info:""},{name:"output_scf_wc_para",required:!0,valid_types:"Dict",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:230,message:"Invalid workchain parameters."},{status:231,message:"Invalid input configuration."},{status:233,message:"Input codes do not correspond to fleur or inpgen respectively."},{status:235,message:"Input file modification failed."},{status:236,message:"Input file was corrupted after user's modifications."},{status:360,message:"Inpgen calculation failed."},{status:361,message:"Fleur calculation failed."},{status:362,message:"SCF cycle did not lead to convergence."}]},class:"aiida_fleur.workflows.scf:FleurScfWorkChain"},"fleur.ssdisp":{description:["This workflow calculates spin spiral dispersion of a structure."],spec:{inputs:[{name:"fleur",required:!0,valid_types:"Code",info:""},{name:"fleurinp",required:!1,valid_types:"FleurinpData, NoneType",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"options",required:!1,valid_types:"Dict, NoneType",info:""},{name:"remote",required:!1,valid_types:"RemoteData, NoneType",info:""},{name:"scf",required:!1,valid_types:"Data",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"output_ssdisp_wc_para",required:!0,valid_types:"Dict",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:230,message:"Invalid workchain parameters."},{status:231,message:"Invalid input configuration."},{status:233,message:"Invalid code node specified, check inpgen and fleur code nodes."},{status:235,message:"Input file modification failed."},{status:236,message:"Input file was corrupted after user's modifications."},{status:334,message:"Reference calculation failed."},{status:335,message:"Found no reference calculation remote repository."},{status:336,message:"Force theorem calculation failed."}]},class:"aiida_fleur.workflows.ssdisp:FleurSSDispWorkChain"},"fleur.ssdisp_conv":{description:["This workflow calculates the Spin Spiral Dispersion of a structure."],spec:{inputs:[{name:"scf",required:!0,valid_types:"Data",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"output_ssdisp_conv_wc_para",required:!0,valid_types:"Dict",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The 
process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:230,message:"Invalid workchain parameters."},{status:340,message:"Convergence SSDisp calculation failed for all q-vectors."},{status:341,message:"Convergence SSDisp calculation failed for some q-vectors."}]},class:"aiida_fleur.workflows.ssdisp_conv:FleurSSDispConvWorkChain"},"fleur.strain":{description:["This workflow calculates the deformation potential a structure = -BdEg/dP = d(Eg)/d(ln(V))."," Calculates several unit cells with different volumes."," A Birch_Murnaghan equation of states fit determines the Bulk modulus(B) and the"," ground-state volume of the cell.",""," :params wf_parameters: Dict node, optional 'wf_parameters', protocol specifying parameter dict"," :params structure: StructureData node, 'structure' crystal structure"," :params calc_parameters: Dict node, optional 'calc_parameters' parameters for inpgen"," :params inpgen: Code node,"," :params fleur: Code node,","",""," :return output_strain_wc_para: Dict node, contains relevant output information."," about general succeed, fit results and so on."],spec:{inputs:[{name:"fleur",required:!0,valid_types:"Code",info:""},{name:"inpgen",required:!0,valid_types:"Code",info:""},{name:"structure",required:!0,valid_types:"StructureData",info:""},{name:"calc_parameters",required:!1,valid_types:"Dict, NoneType",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"options",required:!1,valid_types:"Dict, NoneType",info:""},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:""},{name:"wf_parameters",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"output_strain_wc_para",required:!0,valid_types:"Dict",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:331,message:"Invalid code node specified, check inpgen and fleur code nodes."}]},class:"aiida_fleur.workflows.strain:FleurStrainWorkChain"}},console_scripts:{"aiida-fleur":"aiida_fleur.cmdline:cmd_root"}},commits_count:202,summaryinfo:[{colorclass:"blue",text:"Calculations",count:2},{colorclass:"brown",text:"Parsers",count:2},{colorclass:"red",text:"Data",count:1},{colorclass:"green",text:"Workflows",count:19},{colorclass:"purple",text:"Console scripts",count:1}],pip_install_cmd:"pip install aiida-fleur",is_installable:"True"},"aiida-flexpart":{code_home:"https://github.com/aiidaplugins/aiida-flexpart",entry_point_prefix:"flexpart",pip_url:"git+https://github.com/aiidaplugins/aiida-flexpart",name:"aiida-flexpart",package_name:"aiida_flexpart",hosted_on:"github.com",metadata:{author:"The AiiDA Team",author_email:"aliaksandr.yakutovich@empa.ch",version:"0.1.0a0",description:"AiiDA plugin for the FLEXPART code (simulation of atmospheric transport processes).",classifiers:["Programming Language :: Python","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Framework :: 
AiiDA"]},aiida_version:">=1.6.5,<3.0.0",entry_points:{"aiida.calculations":{"flexpart.cosmo":"aiida_flexpart.calculations.cosmo:FlexpartCosmoCalculation"},"aiida.parsers":{"flexpart.cosmo":"aiida_flexpart.parsers.cosmo:FlexpartCosmoParser"},"aiida.workflows":{"flexpart.multi_dates":"aiida_flexpart.workflows.multi_dates_workflow:FlexpartMultipleDatesWorkflow"}},commits_count:0,development_status:"planning",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install git+https://github.com/aiidaplugins/aiida-flexpart"},"aiida-gaussian":{code_home:"https://github.com/nanotech-empa/aiida-gaussian",entry_point_prefix:"gaussian",pip_url:"aiida-gaussian",plugin_info:"https://raw.githubusercontent.com/nanotech-empa/aiida-gaussian/master/pyproject.toml",name:"aiida-gaussian",package_name:"aiida_gaussian",hosted_on:"github.com",metadata:{description:"AiiDA plugin for the Gaussian quantum chemistry software.",author:"Kristjan Eimre, Pezhman Zarabadi-Poor, Aliaksandr Yakutovich",license:"MIT",classifiers:["Development Status :: 5 - Production/Stable","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Operating System :: OS Independent","Programming Language :: Python :: 3","Topic :: Scientific/Engineering :: Chemistry","Topic :: Scientific/Engineering :: Physics","Topic :: Software Development :: Libraries :: Python Modules"],version:"2.0.4"},aiida_version:">=2.0.0,<3.0.0",entry_points:{"aiida.calculations":{gaussian:"aiida_gaussian.calculations:GaussianCalculation","gaussian.cubegen":"aiida_gaussian.calculations:CubegenCalculation","gaussian.formchk":"aiida_gaussian.calculations:FormchkCalculation"},"aiida.parsers":{"gaussian.advanced":"aiida_gaussian.parsers.gaussian:GaussianAdvancedParser","gaussian.base":"aiida_gaussian.parsers.gaussian:GaussianBaseParser","gaussian.cubegen_base":"aiida_gaussian.parsers.cubegen:CubegenBaseParser"},"aiida.workflows":{"gaussian.base":"aiida_gaussian.workchains:GaussianBaseWorkChain","gaussian.cubes":"aiida_gaussian.workchains:GaussianCubesWorkChain"}},commits_count:20,development_status:"stable",summaryinfo:[{colorclass:"blue",text:"Calculations",count:3},{colorclass:"brown",text:"Parsers",count:3},{colorclass:"green",text:"Workflows",count:2}],pip_install_cmd:"pip install aiida-gaussian",is_installable:"False"},"aiida-gaussian-datatypes":{code_home:"https://github.com/dev-zero/aiida-gaussian-datatypes",documentation_url:"https://github.com/dev-zero/aiida-gaussian-datatypes/blob/master/README.md",entry_point_prefix:"gaussian",pip_url:"aiida-gaussian-datatypes",plugin_info:"https://raw.github.com/dev-zero/aiida-gaussian-datatypes/master/setup.json",name:"aiida-gaussian-datatypes",package_name:"aiida_gaussian_datatypes",hosted_on:"github.com",metadata:{description:"AiiDA data plugin to manage gaussian datatypes (basis sets and pseudopotentials) as first-class citizens",author:"Tiziano Müller",author_email:"tiziano.mueller@chem.uzh.ch",license:"MIT License",home_page:"https://github.com/dev-zero/aiida-gaussian-datatypes",classifiers:["Development Status :: 4 - Beta","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Operating System :: OS Independent","Programming Language :: Python :: 3.6","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: 
Scientific/Engineering :: Chemistry","Topic :: Software Development :: Libraries :: Python Modules"],version:"0.5.1"},aiida_version:">=1.6.2",entry_points:{"aiida.cmdline.data":{"gaussian.basisset":"aiida_gaussian_datatypes.basisset.cli:cli","gaussian.pseudo":"aiida_gaussian_datatypes.pseudopotential.cli:cli"},"aiida.data":{"gaussian.basisset":"aiida_gaussian_datatypes.basisset.data:BasisSet","gaussian.pseudo":"aiida_gaussian_datatypes.pseudopotential.data:Pseudopotential"},"aiida.groups":{"gaussian.basisset":"aiida_gaussian_datatypes.groups:BasisSetGroup","gaussian.pseudo":"aiida_gaussian_datatypes.groups:PseudopotentialGroup"}},commits_count:0,development_status:"beta",summaryinfo:[{colorclass:"red",text:"Data",count:2},{colorclass:"orange",text:"Other (Data commands, Groups)",count:4}],pip_install_cmd:"pip install aiida-gaussian-datatypes",is_installable:"True"},"aiida-gollum":{code_home:"https://github.com/garsua/aiida-gollum/",documentation_url:"https://aiida-gollum.readthedocs.io/",entry_point_prefix:"gollum",pip_url:"git+https://github.com/garsua/aiida-gollum",name:"aiida-gollum",package_name:"aiida_gollum",hosted_on:"github.com",metadata:{author:"Victor M. Garcia-Suarez",author_email:"vm.garcia@cinn.es",version:"0.12.0",description:"A plugin for Gollum functionality within AiiDA framework.",classifiers:["License :: OSI Approved :: MIT License","Framework :: AiiDA","Programming Language :: Python :: 2.7","Development Status :: 1 - Alpha"]},aiida_version:">=0.12.0",entry_points:{"aiida.calculations":{"gollum.gollum":"aiida_gollum.calculations.gollum:GollumCalculation"},"aiida.parsers":{"gollum.parser":"aiida_gollum.parsers.gollum:GollumParser"}},commits_count:0,development_status:"planning",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1}],pip_install_cmd:"pip install git+https://github.com/garsua/aiida-gollum"},"aiida-graphql":{code_home:"https://github.com/dev-zero/aiida-graphql",entry_point_prefix:"graphql",pip_url:"aiida-graphql",name:"aiida-graphql",package_name:"aiida_graphql",hosted_on:"github.com",metadata:{description:"Strawberry-based GraphQL API Server for AiiDA",author:"Tiziano Müller",author_email:"tiziano.mueller@chem.uzh.ch",license:"MIT",home_page:"https://github.com/dev-zero/aiida-graphql",classifiers:["Development Status :: 3 - Alpha","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Operating System :: OS Independent","Programming Language :: Python :: 3","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Topic :: Software Development :: Libraries :: Python Modules"],version:"0.0.2"},aiida_version:">=1.0.0b6,<2.0.0",entry_points:{},commits_count:0,development_status:"alpha",summaryinfo:[],pip_install_cmd:"pip install aiida-graphql",is_installable:"True"},"aiida-gromacs":{code_home:"https://github.com/jimboid/aiida-gromacs",documentation_url:"https://aiida-gromacs.readthedocs.io/",entry_point_prefix:"gromacs",pip_url:"git+https://github.com/jimboid/aiida-gromacs",name:"aiida-gromacs",package_name:"aiida_gromacs",hosted_on:"github.com",metadata:{description:"A plugin for using GROMACS with AiiDA for molecular dymanics simulations.",classifiers:["Programming Language :: Python","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Development Status :: 3 - Alpha","Framework :: AiiDA"],author:"James 
Gebbie-Rayet",author_email:"james.gebbie@stfc.ac.uk"},aiida_version:">=2.0,<3",entry_points:{"aiida.data":{"gromacs.pdb2gmx":"aiida_gromacs.data.pdb2gmx:Pdb2gmxParameters","gromacs.editconf":"aiida_gromacs.data.editconf:EditconfParameters","gromacs.genion":"aiida_gromacs.data.genion:GenionParameters","gromacs.grompp":"aiida_gromacs.data.grompp:GromppParameters","gromacs.mdrun":"aiida_gromacs.data.mdrun:MdrunParameters","gromacs.solvate":"aiida_gromacs.data.solvate:SolvateParameters"},"aiida.calculations":{"gromacs.pdb2gmx":{description:["AiiDA calculation plugin wrapping the 'gmx pdb2gmx' executable.",""," AiiDA plugin wrapper for converting PDB files to GRO files."],spec:{inputs:[{name:"parameters",required:!0,valid_types:"Pdb2gmxParameters",info:"Command line parameters for gmx pdb2gmx"},{name:"pdbfile",required:!0,valid_types:"SinglefileData",info:"Input structure."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"grofile",required:!0,valid_types:"SinglefileData",info:"Output forcefield compliant file."},{name:"itpfile",required:!0,valid_types:"SinglefileData",info:"Output forcefield compliant file."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"stdout",required:!0,valid_types:"SinglefileData",info:"stdout"},{name:"topfile",required:!0,valid_types:"SinglefileData",info:"Output forcefield compliant file."},{name:"n_file",required:!1,valid_types:"SinglefileData",info:"Output index file"},{name:"q_file",required:!1,valid_types:"SinglefileData",info:"Output Structure file"},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Calculation did not produce all expected output files."}]},class:"aiida_gromacs.calculations.pdb2gmx:Pdb2gmxCalculation"},"gromacs.editconf":{description:["AiiDA calculation plugin wrapping the 'gmx editconf' executable.",""," AiiDA plugin wrapper for adding a simulation box to structure file."],spec:{inputs:[{name:"grofile",required:!0,valid_types:"SinglefileData",info:"Input structure file."},{name:"parameters",required:!0,valid_types:"EditconfParameters",info:"Command line parameters for gmx editconf."},{name:"bf_file",required:!1,valid_types:"SinglefileData, NoneType",info:"Generic data file."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"n_file",required:!1,valid_types:"SinglefileData, NoneType",info:"Index file."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"grofile",required:!0,valid_types:"SinglefileData",info:"Output file containing simulation box."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"stdout",required:!0,valid_types:"SinglefileData",info:"stdout"},{name:"mead_file",required:!1,valid_types:"SinglefileData",info:"Coordination file for MEAD"},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Calculation did not produce all expected output files."}]},class:"aiida_gromacs.calculations.editconf:EditconfCalculation"},"gromacs.genion":{description:["AiiDA calculation plugin wrapping the 'gmx genion' executable.",""," AiiDA plugin wrapper for converting PDB files to GRO files."],spec:{inputs:[{name:"parameters",required:!0,valid_types:"GenionParameters",info:"Command line parameters for gmx genion"},{name:"topfile",required:!0,valid_types:"SinglefileData",info:"Input topology file."},{name:"tprfile",required:!0,valid_types:"SinglefileData",info:"Input tpr file."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"n_file",required:!1,valid_types:"SinglefileData, NoneType",info:"Index file."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"grofile",required:!0,valid_types:"SinglefileData",info:"Output gro file with ions added."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"stdout",required:!0,valid_types:"SinglefileData",info:"stdout"},{name:"topfile",required:!0,valid_types:"SinglefileData",info:"Output topology with ions added."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Calculation did not produce all expected output files."}]},class:"aiida_gromacs.calculations.genion:GenionCalculation"},"gromacs.grompp":{description:["AiiDA calculation plugin wrapping the 'gmx grompp' executable.",""," AiiDA plugin wrapper for converting PDB files to GRO files."],spec:{inputs:[{name:"grofile",required:!0,valid_types:"SinglefileData",info:"Input structure"},{name:"mdpfile",required:!0,valid_types:"SinglefileData",info:"grompp run file."},{name:"parameters",required:!0,valid_types:"GromppParameters",info:"Command line parameters for gmx grompp"},{name:"topfile",required:!0,valid_types:"SinglefileData",info:"Input topology"},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"e_file",required:!1,valid_types:"SinglefileData, NoneType",info:"Energy file"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"n_file",required:!1,valid_types:"SinglefileData, NoneType",info:"Index file"},{name:"qmi_file",required:!1,valid_types:"SinglefileData, NoneType",info:"QM input file"},{name:"r_file",required:!1,valid_types:"SinglefileData, NoneType",info:"Structure file"},{name:"rb_file",required:!1,valid_types:"SinglefileData, NoneType",info:"Structure file"},{name:"ref_file",required:!1,valid_types:"SinglefileData, NoneType",info:"Full precision trajectory file"},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"t_file",required:!1,valid_types:"SinglefileData, NoneType",info:"Full precision trajectory file"}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"stdout",required:!0,valid_types:"SinglefileData",info:"stdout"},{name:"tprfile",required:!0,valid_types:"SinglefileData",info:"Output gro file ready for adding ions."},{name:"imd_file",required:!1,valid_types:"SinglefileData",info:"Coordinate file in Gromos-87 format"},{name:"po_file",required:!1,valid_types:"SinglefileData",info:"grompp input file with MD parameters"},{name:"pp_file",required:!1,valid_types:"SinglefileData",info:"Topology file"},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Calculation did not produce all expected output files."}]},class:"aiida_gromacs.calculations.grompp:GromppCalculation"},"gromacs.mdrun":{description:["AiiDA calculation plugin wrapping the 'gmx mdrun' executable.",""," AiiDA plugin wrapper for converting PDB files to GRO files."],spec:{inputs:[{name:"parameters",required:!0,valid_types:"MdrunParameters",info:"Command line parameters for gmx mdrun"},{name:"tprfile",required:!0,valid_types:"SinglefileData",info:"Input structure."},{name:"awh_file",required:!1,valid_types:"SinglefileData, NoneType",info:"xvgr/xmgr file"},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. 
This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"cpi_file",required:!1,valid_types:"SinglefileData, NoneType",info:"Checkpoint file"},{name:"ei_file",required:!1,valid_types:"SinglefileData, NoneType",info:"ED sampling input"},{name:"membed_file",required:!1,valid_types:"SinglefileData, NoneType",info:"Generic data file"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"mn_file",required:!1,valid_types:"SinglefileData, NoneType",info:"Index file"},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"mp_file",required:!1,valid_types:"SinglefileData, NoneType",info:"Topology file"},{name:"multidir_file",required:!1,valid_types:"SinglefileData, NoneType",info:"Run directory"},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"rerun_file",required:!1,valid_types:"SinglefileData, NoneType",info:"Trajectory: xtc trr cpt gro g96 pdb tng"},{name:"table_file",required:!1,valid_types:"SinglefileData, NoneType",info:"xvgr/xmgr file"},{name:"tableb_file",required:!1,valid_types:"SinglefileData, NoneType",info:"xvgr/xmgr file"},{name:"tablep_file",required:!1,valid_types:"SinglefileData, NoneType",info:"xvgr/xmgr file"}],outputs:[{name:"enfile",required:!0,valid_types:"SinglefileData",info:"Output energy file."},{name:"grofile",required:!0,valid_types:"SinglefileData",info:"Output structure file."},{name:"logfile",required:!0,valid_types:"SinglefileData",info:"Output log file."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"stdout",required:!0,valid_types:"SinglefileData",info:"stdout"},{name:"trrfile",required:!0,valid_types:"SinglefileData",info:"Output trajectory."},{name:"cptfile",required:!1,valid_types:"SinglefileData",info:"Checkpoint file."},{name:"dhdl_file",required:!1,valid_types:"SinglefileData",info:"xvgr/xmgr file"},{name:"eo_file",required:!1,valid_types:"SinglefileData",info:"xvgr/xmgr file"},{name:"field_file",required:!1,valid_types:"SinglefileData",info:"xvgr/xmgr file"},{name:"if_file",required:!1,valid_types:"SinglefileData",info:"xvgr/xmgr file"},{name:"mtx_file",required:!1,valid_types:"SinglefileData",info:"Hessian Matrix"},{name:"pf_file",required:!1,valid_types:"SinglefileData",info:"xvgr/xmgr file"},{name:"px_file",required:!1,valid_types:"SinglefileData",info:"xvgr/xmgr file"},{name:"ra_file",required:!1,valid_types:"SinglefileData",info:"Log file"},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"ro_file",required:!1,valid_types:"SinglefileData",info:"xvgr/xmgr file"},{name:"rs_file",required:!1,valid_types:"SinglefileData",info:"Log file"},{name:"rt_file",required:!1,valid_types:"SinglefileData",info:"Log file"},{name:"swap_file",required:!1,valid_types:"SinglefileData",info:"xvgr/xmgr file"},{name:"tpi_file",required:!1,valid_types:"SinglefileData",info:"xvgr/xmgr file"},{name:"tpid_file",required:!1,valid_types:"SinglefileData",info:"xvgr/xmgr file"},{name:"x_file",required:!1,valid_types:"SinglefileData",info:"Compressed trajectory (tng format or portable xdr format)"}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Calculation did not produce all expected output files."}]},class:"aiida_gromacs.calculations.mdrun:MdrunCalculation"},"gromacs.solvate":{description:["AiiDA calculation plugin wrapping the 'gmx solvate' executable.",""," AiiDA plugin wrapper for solvating a molecular system."],spec:{inputs:[{name:"grofile",required:!0,valid_types:"SinglefileData",info:"Input structure"},{name:"parameters",required:!0,valid_types:"SolvateParameters",info:"Command line parameters for gmx solvate."},{name:"topfile",required:!0,valid_types:"SinglefileData",info:"Input topology"},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. 
This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"grofile",required:!0,valid_types:"SinglefileData",info:"Output solvated gro file."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"stdout",required:!0,valid_types:"SinglefileData",info:"stdout"},{name:"topfile",required:!0,valid_types:"SinglefileData",info:"Output topology file."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Calculation did not produce all expected output files."}]},class:"aiida_gromacs.calculations.solvate:SolvateCalculation"},"general-MD":{description:["AiiDA calculation plugin wrapping an executable with user defined"," input and output files."],spec:{inputs:[{name:"code",required:!0,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. 
This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"command",required:!1,valid_types:"Str, NoneType",info:"The command used to execute the job."},{name:"input_files",required:!1,valid_types:"SinglefileData",info:"Dictionary of input files."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"output_files",required:!1,valid_types:"List, NoneType",info:"List of output file names."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"log",required:!1,valid_types:"SinglefileData",info:"link to the default file.out."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Calculation did not produce all expected output files."},{status:301,message:"Specified output file not produced by command."}]},class:"aiida_gromacs.calculations.generalMD:GeneralCalculation"}},"aiida.parsers":{"gromacs.pdb2gmx":"aiida_gromacs.parsers.pdb2gmx:Pdb2gmxParser","gromacs.editconf":"aiida_gromacs.parsers.editconf:EditconfParser","gromacs.genion":"aiida_gromacs.parsers.genion:GenionParser","gromacs.grompp":"aiida_gromacs.parsers.grompp:GromppParser","gromacs.mdrun":"aiida_gromacs.parsers.mdrun:MdrunParser","gromacs.solvate":"aiida_gromacs.parsers.solvate:SolvateParser","general-MD":"aiida_gromacs.parsers.generalMD:GeneralParser"},"aiida.workflows":{"gromacs.setup":{description:["WorkChain for setting up a gromacs simulation automatically."],spec:{inputs:[{name:"editconfparameters",required:!0,valid_types:"EditconfParameters",info:"Command line parameters for gmx editconf"},{name:"genionparameters",required:!0,valid_types:"GenionParameters",info:"Command line parameters for 
gmx genion"},{name:"gromppionsparameters",required:!0,valid_types:"GromppParameters",info:"Command line parameters for gmx grompp"},{name:"gromppminparameters",required:!0,valid_types:"GromppParameters",info:"Command line parameters for gmx grompp minimisation run"},{name:"gromppnptparameters",required:!0,valid_types:"GromppParameters",info:"Command line parameters for gmx grompp npt equilibration run"},{name:"gromppnvtparameters",required:!0,valid_types:"GromppParameters",info:"Command line parameters for gmx grompp nvt equilibration run"},{name:"gromppprodparameters",required:!0,valid_types:"GromppParameters",info:"Command line parameters for gmx grompp production run"},{name:"ionsmdp",required:!0,valid_types:"SinglefileData",info:"MD parameters for adding ions."},{name:"local_code",required:!0,valid_types:"Code",info:""},{name:"mdrunparameters",required:!0,valid_types:"MdrunParameters",info:"Command line parameters for gmx mdrun production run"},{name:"minimiseparameters",required:!0,valid_types:"MdrunParameters",info:"Command line parameters for gmx mdrun minimisation run"},{name:"minmdp",required:!0,valid_types:"SinglefileData",info:"MD parameters for minimisation."},{name:"nptmdp",required:!0,valid_types:"SinglefileData",info:"MD parameters for NPT equilibration."},{name:"nptparameters",required:!0,valid_types:"MdrunParameters",info:"Command line parameters for gmx mdrun npt equilibration run"},{name:"nvtmdp",required:!0,valid_types:"SinglefileData",info:"MD parameters for NVT equilibration."},{name:"nvtparameters",required:!0,valid_types:"MdrunParameters",info:"Command line parameters for gmx mdrun nvt equilibration run"},{name:"pdb2gmxparameters",required:!0,valid_types:"Pdb2gmxParameters",info:"Command line parameters for gmx pdb2gmx"},{name:"pdbfile",required:!0,valid_types:"SinglefileData",info:"Input structure."},{name:"prodmdp",required:!0,valid_types:"SinglefileData",info:"MD parameters for production run."},{name:"solvateparameters",required:!0,valid_types:"SolvateParameters",info:"Command line parameters for gmx solvate"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"remote_code",required:!1,valid_types:"Code, NoneType",info:""}],outputs:[{name:"result",required:!0,valid_types:"",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_gromacs.workflows.simsetup:SetupWorkChain"}}},commits_count:99,development_status:"alpha",summaryinfo:[{colorclass:"blue",text:"Calculations",count:7},{colorclass:"brown",text:"Parsers",count:7},{colorclass:"red",text:"Data",count:6},{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install git+https://github.com/jimboid/aiida-gromacs",is_installable:"True"},"aiida-grouppathx":{code_home:"https://github.com/zhubonan/aiida-grouppathx",development_status:"beta",entry_point_prefix:"grouppathx",pip_url:"aiida-grouppathx",name:"aiida-grouppathx",package_name:"aiida_grouppathx",hosted_on:"github.com",metadata:{description:"AiiDA plugin provides the GroupPathX class",author_email:"Bonan Zhu ",classifiers:["Development Status :: 3 - Alpha","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Programming Language :: 
Python"],version:"0.2.0"},aiida_version:">=1.6.4,<3",entry_points:{"aiida.cmdline.data":{gpx:"aiida_grouppathx.cli:grouppathx_cli"}},commits_count:7,summaryinfo:[{colorclass:"orange",text:"Other (Data commands)",count:1}],pip_install_cmd:"pip install aiida-grouppathx",is_installable:"True"},"aiida-gudhi":{code_home:"https://github.com/ltalirz/aiida-gudhi",development_status:"beta",entry_point_prefix:"gudhi",pip_url:"aiida-gudhi",plugin_info:"https://raw.github.com/ltalirz/aiida-gudhi/master/setup.json",name:"aiida-gudhi",package_name:"aiida_gudhi",hosted_on:"github.com",metadata:{description:"AiiDA plugin for the [GUDHI](http://gudhi.gforge.inria.fr/) library for topological data analysis.",author:"Leopold Talirz",author_email:"leopold.talirz@gmail.com",license:"MIT",home_page:"https://github.com/ltalirz/aiida-gudhi",classifiers:["Programming Language :: Python"],version:"0.1.0a3"},aiida_version:"*",entry_points:{"aiida.calculations":{"gudhi.rdm":"aiida_gudhi.calculations.rips:RipsDistanceMatrixCalculation"},"aiida.data":{"gudhi.rdm":"aiida_gudhi.data.rips:RipsDistanceMatrixParameters"},"aiida.parsers":{"gudhi.rdm":"aiida_gudhi.parsers.rips:RipsParser"}},commits_count:0,summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"red",text:"Data",count:1}],pip_install_cmd:"pip install --pre aiida-gudhi",is_installable:"True"},"aiida-gulp":{code_home:"https://github.com/aiidaplugins/aiida-gulp",development_status:"beta",documentation_url:"https://aiida-gulp.readthedocs.io",entry_point_prefix:"gulp",pip_url:"aiida-gulp",plugin_info:"https://raw.githubusercontent.com/aiidaplugins/aiida-gulp/master/setup.json",name:"aiida-gulp",package_name:"aiida_gulp",hosted_on:"github.com",metadata:{description:"AiiDA plugin for running the GULP MD code",author:"Chris Sewell",author_email:"chrisj_sewell@hotmail.com",license:"MIT",home_page:"https://github.com/chrisjsewell/aiida-gulp",classifiers:["Framework :: AiiDA","Programming Language :: Python","Programming Language :: Python :: 2.7","Programming Language :: Python :: 3.6","Topic :: Scientific/Engineering :: Chemistry","Topic :: Scientific/Engineering :: Physics"],version:"0.10.0b5"},aiida_version:"1.0.0b5",entry_points:{"aiida.calculations":{"gulp.fitting":"aiida_gulp.calculations.gulp_fitting:GulpFittingCalculation","gulp.optimize":"aiida_gulp.calculations.gulp_optimize:GulpOptCalculation","gulp.single":"aiida_gulp.calculations.gulp_single:GulpSingleCalculation"},"aiida.cmdline.data":{"gulp.potentials":"aiida_gulp.cmndline.potentials:potentials"},"aiida.data":{"gulp.potential":"aiida_gulp.data.potential:EmpiricalPotential","gulp.symmetry":"aiida_gulp.data.symmetry:SymmetryData"},"aiida.parsers":{"gulp.fitting":"aiida_gulp.parsers.parse_fitting:GulpFittingParser","gulp.optimize":"aiida_gulp.parsers.parse_opt:GulpOptParser","gulp.single":"aiida_gulp.parsers.parse_single:GulpSingleParser"},"aiida.workflows":{},console_scripts:{gulp_mock:"aiida_gulp.tests.mock_gulp:main"},"gulp.potentials":{lj:"aiida_gulp.potentials.lj:PotentialWriterLJ",reaxff:"aiida_gulp.potentials.reaxff:PotentialWriterReaxff"}},commits_count:0,summaryinfo:[{colorclass:"blue",text:"Calculations",count:3},{colorclass:"brown",text:"Parsers",count:3},{colorclass:"red",text:"Data",count:2},{colorclass:"purple",text:"Console scripts",count:1},{colorclass:"orange",text:"Other (Data commands, Gulp potentials)",count:3}],pip_install_cmd:"pip install --pre 
aiida-gulp",is_installable:"True"},"aiida-kkr":{code_home:"https://github.com/JuDFTteam/aiida-kkr/tree/develop",development_status:"stable",documentation_url:"https://aiida-kkr.readthedocs.io/",entry_point_prefix:"kkr",pip_url:"aiida-kkr",name:"aiida-kkr",package_name:"aiida_kkr",hosted_on:"github.com",metadata:{description:"AiiDA plugin for the JuKKR codes",author_email:"Philipp Ruessmann , Jens Broeder , Fabian Bertoldo ",classifiers:["Development Status :: 4 - Beta","Environment :: Plugins","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Programming Language :: Python :: 3","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering :: Physics"],version:"2.0.0"},aiida_version:null,entry_points:{},commits_count:90,summaryinfo:[],pip_install_cmd:"pip install aiida-kkr",is_installable:"True"},"aiida-lammps":{code_home:"https://github.com/aiidaplugins/aiida-lammps",development_status:"beta",entry_point_prefix:"lammps",pip_url:"git+https://github.com/aiidaplugins/aiida-lammps",name:"aiida-lammps",package_name:"aiida_lammps",hosted_on:"github.com",metadata:{author:"Abel Carreras, Chris Sewell",author_email:"chrisj_sewell@hotmail.com",version:"0.8.0",description:"AiiDA plugin for LAMMPS",classifiers:["Programming Language :: Python","Programming Language :: Python :: 3","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering :: Chemistry","Topic :: Scientific/Engineering :: Physics","Framework :: AiiDA"]},aiida_version:">=1.4.0,<2.0.0",entry_points:{"aiida.calculations":{"lammps.combinate":"aiida_lammps.calculations.lammps.combinate:CombinateCalculation","lammps.force":"aiida_lammps.calculations.lammps.force:ForceCalculation","lammps.md":"aiida_lammps.calculations.lammps.md:MdCalculation","lammps.md.multi":"aiida_lammps.calculations.lammps.md_multi:MdMultiCalculation","lammps.optimize":"aiida_lammps.calculations.lammps.optimize:OptimizeCalculation",dynaphopy:"aiida_lammps.calculations.dynaphopy: DynaphopyCalculation"},"aiida.parsers":{"lammps.force":"aiida_lammps.parsers.lammps.force:ForceParser","lammps.md":"aiida_lammps.parsers.lammps.md:MdParser","lammps.md.multi":"aiida_lammps.parsers.lammps.md_multi:MdMultiParser","lammps.optimize":"aiida_lammps.parsers.lammps.optimize:OptimizeParser",dynaphopy:"aiida_lammps.parsers.dynaphopy: DynaphopyParser"},"aiida.data":{"lammps.potential":"aiida_lammps.data.potential:EmpiricalPotential","lammps.trajectory":"aiida_lammps.data.trajectory:LammpsTrajectory"},"lammps.potentials":{eam:"aiida_lammps.data.pot_plugins.eam:EAM",lennard_jones:"aiida_lammps.data.pot_plugins.lennard_jones:LennardJones",reaxff:"aiida_lammps.data.pot_plugins.reaxff:Reaxff",tersoff:"aiida_lammps.data.pot_plugins.tersoff:Tersoff"}},commits_count:0,summaryinfo:[{colorclass:"blue",text:"Calculations",count:6},{colorclass:"brown",text:"Parsers",count:5},{colorclass:"red",text:"Data",count:2},{colorclass:"orange",text:"Other (Lammps potentials)",count:4}],pip_install_cmd:"pip install 
git+https://github.com/aiidaplugins/aiida-lammps",is_installable:"True"},"aiida-lsmo":{code_home:"https://github.com/lsmo-epfl/aiida-lsmo",development_status:"stable",entry_point_prefix:"lsmo",pip_url:"git+https://github.com/lsmo-epfl/aiida-lsmo",name:"aiida-lsmo",package_name:"aiida_lsmo",hosted_on:"github.com",metadata:{author:"Aliaksandr Yakutovich, Daniele Ongari, Leopold Talirz",author_email:"aliaksandr.yakutovich@epfl.ch",version:"1.0.0",description:"AiiDA workflows for the LSMO laboratory at EPFL",classifiers:["Programming Language :: Python :: 3.6","Programming Language :: Python :: 3.7"]},aiida_version:">=1.0.0",entry_points:{"aiida.calculations":{"lsmo.ff_builder":{description:["AiiDA calcfunction to assemble force filed parameters into SinglefileData for Raspa."],spec:{inputs:[{name:"params",required:!0,valid_types:"Data",info:""},{name:"cif_molecule",required:!1,valid_types:"Data, NoneType",info:""},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_lsmo.calcfunctions:ff_builder"},"lsmo.calc_ch4_working_cap":{description:["Compute the CH4 working capacity from the output_parameters Dict of IsothermWorkChain."," This must have run calculations at 5.8 and 65.0 bar (at 298K), which are the standard reference for the evaluation.",""," The results can be compared with Simon2015 (10.1039/C4EE03515A)."],spec:{inputs:[{name:"isot_dict",required:!0,valid_types:"Data",info:""},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_lsmo.calcfunctions:calc_ch4_working_cap"},"lsmo.calc_h2_working_cap":{description:["Compute the H2 working capacity from the output_parameters Dict of MultiTempIsothermWorkChain."," This must have run calculations at 1, 5 and 100 bar at 77, 198, 298 K."," The US DOE Target for the Onboard Storage of Hydrogen Vehicles set the bar to 4.5 wt% and 30 g/L (Kapelewski2018)."," Case-A: near-ambient-T adsorption, 100bar/198K to 5bar/298K (cf. Kapelewski2018, 10.1021/acs.chemmater.8b03276)"," ....... Ni2(m-dobdc), experimental: 23.0 g/L"," Case-B: low T adsorption, 100-5bar at 77K (cf. Ahmed2019, 10.1038/s41467-019-09365-w)"," ....... NU-100, best experimental: 35.5 g/L"," Case-C: low T adsorption at low discharge, 100-1bar at 77K (cf. Thornton2017, 10.1021/acs.chemmater.6b04933)"," ....... 
hypMOF-5059389, best simulated: 40.0 g/L"],spec:{inputs:[{name:"isotmt_dict",required:!0,valid_types:"Data",info:""},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_lsmo.calcfunctions:calc_h2_working_cap"},"lsmo.calc_o2_working_cap":{description:["Compute the O2 working capacity from the output_parameters Dict of IsothermWorkChain."," This must have run calculations at 5 and 140.0 bar (at 298K), to be consistent with the screening of Moghadam2018"," (10.1038/s41467-018-03892-8), for which the MOF ANUGIA (UMCM-152) was found to have a volumetric working capacity"," of 249 vSTP/v (simulations are nearly identical to experiments)."," Consider that, at the same conditions, an empty thank can only store 136 vSTP/v, and a comparable working capacity"," can only br obtained compressing till 300bar."],spec:{inputs:[{name:"isot_dict",required:!0,valid_types:"Data",info:""},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_lsmo.calcfunctions:calc_o2_working_cap"},"lsmo.calc_selectivity":{description:["Compute the selectivity of gas A on gas B as S = kH_a/kH_b."," Note that if the material is not porous to one of the materials, the result is simply {'is_porous': False}."," To maintain the comptaibility with v1, intead of checking 'is_porous', it checks for the henry_coefficient_average"," key in the Dict."],spec:{inputs:[{name:"isot_dict_a",required:!0,valid_types:"Data",info:""},{name:"isot_dict_b",required:!0,valid_types:"Data",info:""},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_lsmo.calcfunctions:calc_selectivity"}},"aiida.parsers":{"lsmo.cp2k_bsse_parser":"aiida_lsmo.parsers:Cp2kBsseParser","lsmo.cp2k_advanced_parser":"aiida_lsmo.parsers:Cp2kAdvancedParser"},"aiida.workflows":{"lsmo.binding_site":{description:["A workchain that combines SimAnnealing & Cp2kBindingEnergy"],spec:{inputs:[{name:"cp2k_base",required:!0,valid_types:"",info:""},{name:"molecule",required:!0,valid_types:"Str, Dict",info:"Adsorbate molecule: settings to be read from the yaml.Advanced: input a Dict for non-standard settings."},{name:"parameters",required:!0,valid_types:"Dict",info:"Parameters for the SimAnnealing workchain: will be merged with default ones."},{name:"raspa_base",required:!0,valid_types:"",info:""},{name:"structure",required:!0,valid_types:"CifData",info:"Adsorbent framework CIF."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"protocol_modify",required:!1,valid_types:"Dict",info:"Specify custom settings that overvrite the yaml settings"},{name:"protocol_tag",required:!1,valid_types:"Str",info:"The tag of the protocol tag.yaml. 
NOTE: only the settings are read, stage is set to GEO_OPT."},{name:"protocol_yaml",required:!1,valid_types:"SinglefileData",info:"Specify a custom yaml file. NOTE: only the settings are read, stage is set to GEO_OPT."},{name:"starting_settings_idx",required:!1,valid_types:"Int",info:"If idx>0 is chosen, jumps directly to overwrite settings_0 with settings_{idx}"}],outputs:[{name:"dft",required:!0,valid_types:"",info:""},{name:"ff",required:!0,valid_types:"",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_lsmo.workchains:BindingSiteWorkChain"},"lsmo.cp2k_binding_energy":{description:["Submits Cp2kBase work chain for structure + molecule system, first optimizing the geometry of the molecule and"," later computing the BSSE corrected interaction energy."," This work chain is inspired to Cp2kMultistage, and shares some logics and data from it."],spec:{inputs:[{name:"cp2k_base",required:!0,valid_types:"",info:""},{name:"molecule",required:!0,valid_types:"StructureData",info:"Input molecule in the unit cell of the structure."},{name:"structure",required:!0,valid_types:"StructureData",info:"Input structure that contains the molecule."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"protocol_modify",required:!1,valid_types:"Dict",info:"Specify custom settings that overvrite the yaml settings"},{name:"protocol_tag",required:!1,valid_types:"Str",info:"The tag of the protocol tag.yaml. NOTE: only the settings are read, stage is set to GEO_OPT."},{name:"protocol_yaml",required:!1,valid_types:"SinglefileData",info:"Specify a custom yaml file. 
NOTE: only the settings are read, stage is set to GEO_OPT."},{name:"starting_settings_idx",required:!1,valid_types:"Int",info:"If idx>0 is chosen, jumps directly to overwrite settings_0 with settings_{idx}"}],outputs:[{name:"loaded_molecule",required:!0,valid_types:"StructureData",info:"Molecule geometry in the unit cell."},{name:"loaded_structure",required:!0,valid_types:"StructureData",info:"Geometry of the system with both fragments."},{name:"output_parameters",required:!0,valid_types:"Dict",info:"Info regarding the binding energy of the system."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:901,message:"Specified starting_settings_idx that is not existing, or any in between 0 and idx is missing"},{status:902,message:"Settings for Stage0 are not ok but there are no more robust settings to try"},{status:903,message:"Something important was not printed correctly and the parsing of the first calculation failed"}]},class:"aiida_lsmo.workchains.cp2k_binding_energy:Cp2kBindingEnergyWorkChain"},"lsmo.cp2k_multistage":{description:["Submits Cp2kBase workchains for ENERGY, GEO_OPT, CELL_OPT and MD jobs iteratively"," The protocol_yaml file contains a series of settings_x and stage_x:"," the workchains starts running the settings_0/stage_0 calculation, and, in case of a failure, changes the settings"," untill the SCF of stage_0 converges. Then it uses the same settings to run the next stages (i.e., stage_1, etc.)."],spec:{inputs:[{name:"cp2k_base",required:!0,valid_types:"",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"min_cell_size",required:!1,valid_types:"Float",info:"To avoid using k-points, extend the cell so that min(perp_width)>min_cell_size"},{name:"parent_calc_folder",required:!1,valid_types:"RemoteData",info:"Provide an initial parent folder that contains the wavefunction for restart"},{name:"protocol_modify",required:!1,valid_types:"Dict",info:"Specify custom settings that overvrite the yaml settings"},{name:"protocol_tag",required:!1,valid_types:"Str",info:"The tag of the protocol to be read from {tag}.yaml unless protocol_yaml input is specified"},{name:"protocol_yaml",required:!1,valid_types:"SinglefileData",info:"Specify a custom yaml file with the multistage settings (and ignore protocol_tag)"},{name:"starting_settings_idx",required:!1,valid_types:"Int",info:"If idx>0 is chosen, jumps directly to overwrite settings_0 with settings_{idx}"},{name:"structure",required:!1,valid_types:"StructureData",info:"Input structure"}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"last_input_parameters",required:!1,valid_types:"Dict",info:"CP2K input parameters used (and possibly working) used in the last stage"},{name:"output_parameters",required:!1,valid_types:"Dict",info:"Output CP2K parameters of all the stages, merged together"},{name:"output_structure",required:!1,valid_types:"StructureData",info:"Processed structure (missing if only ENERGY calculation is performed)"}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The 
process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:901,message:"Specified starting_settings_idx that is not existing, or any in between 0 and idx is missing"},{status:902,message:"Settings for Stage0 are not ok but there are no more robust settings to try"},{status:903,message:"Something important was not printed correctly and the parsing of the first calculation failed"}]},class:"aiida_lsmo.workchains:Cp2kMultistageWorkChain"},"lsmo.cp2k_multistage_ddec":{description:["A workchain that combines: Cp2kMultistageWorkChain + Cp2kDdecWorkChain"],spec:{inputs:[{name:"cp2k_base",required:!0,valid_types:"",info:""},{name:"ddec",required:!0,valid_types:"",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"min_cell_size",required:!1,valid_types:"Float",info:"To avoid using k-points, extend the cell so that min(perp_width)>min_cell_size"},{name:"parent_calc_folder",required:!1,valid_types:"RemoteData",info:"Provide an initial parent folder that contains the wavefunction for restart"},{name:"protocol_modify",required:!1,valid_types:"Dict",info:"Specify custom settings that overvrite the yaml settings"},{name:"protocol_tag",required:!1,valid_types:"Str",info:"The tag of the protocol to be read from {tag}.yaml unless protocol_yaml input is specified"},{name:"protocol_yaml",required:!1,valid_types:"SinglefileData",info:"Specify a custom yaml file with the multistage settings (and ignore protocol_tag)"},{name:"starting_settings_idx",required:!1,valid_types:"Int",info:"If idx>0 is chosen, jumps directly to overwrite settings_0 with settings_{idx}"},{name:"structure",required:!1,valid_types:"StructureData",info:"Input structure"}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"structure_ddec",required:!0,valid_types:"CifData",info:"structure with DDEC charges"},{name:"last_input_parameters",required:!1,valid_types:"Dict",info:"CP2K input parameters used (and possibly working) used in the last stage"},{name:"output_parameters",required:!1,valid_types:"Dict",info:"Output CP2K parameters of all the stages, merged together"}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_lsmo.workchains:Cp2kMultistageDdecWorkChain"},"lsmo.isotherm":{description:["Workchain that computes volpo and blocking spheres: if accessible volpo>0"," it also runs a raspa widom calculation for the Henry coefficient."],spec:{inputs:[{name:"molecule",required:!0,valid_types:"Str, Dict",info:"Adsorbate molecule: settings to be read from the yaml.Advanced: input a Dict for non-standard settings."},{name:"parameters",required:!0,valid_types:"Dict",info:"Parameters for the Isotherm workchain (see workchain.schema for default values)."},{name:"raspa_base",required:!0,valid_types:"",info:""},{name:"structure",required:!0,valid_types:"CifData",info:"Adsorbent framework CIF."},{name:"zeopp",required:!0,valid_types:"",info:""},{name:"geometric",required:!1,valid_types:"Dict",info:"[Only used by IsothermMultiTempWorkChain] Already computed geometric 
properties"},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"Results of the single temperature wc: keys can vay depending on is_porous and is_kh_enough booleans."},{name:"block",required:!1,valid_types:"SinglefileData",info:"Blocked pockets fileoutput file."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_lsmo.workchains:IsothermWorkChain"},"lsmo.isotherm_multi_temp":{description:["Run IsothermWorkChain for multiple temperatures: first compute geometric properties"," and then submit Widom+GCMC at different temperatures in parallel"],spec:{inputs:[{name:"molecule",required:!0,valid_types:"Str, Dict",info:"Adsorbate molecule: settings to be read from the yaml.Advanced: input a Dict for non-standard settings."},{name:"parameters",required:!0,valid_types:"Dict",info:"Parameters for the Isotherm workchain (see workchain.schema for default values)."},{name:"raspa_base",required:!0,valid_types:"",info:""},{name:"structure",required:!0,valid_types:"CifData",info:"Adsorbent framework CIF."},{name:"zeopp",required:!0,valid_types:"",info:""},{name:"geometric",required:!1,valid_types:"Dict",info:"[Only used by IsothermMultiTempWorkChain] Already computed geometric properties"},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"Results of isotherms run at different temperatures."},{name:"block",required:!1,valid_types:"SinglefileData",info:"Blocked pockets fileoutput file."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_lsmo.workchains:IsothermMultiTempWorkChain"},"lsmo.isotherm_calc_pe":{description:["Compute CO2 parassitic energy (PE) after running IsothermWorkChain for CO2 and N2 at 300K."],spec:{inputs:[{name:"raspa_base",required:!0,valid_types:"",info:""},{name:"structure",required:!0,valid_types:"CifData",info:"Adsorbent framework CIF."},{name:"zeopp",required:!0,valid_types:"",info:""},{name:"geometric",required:!1,valid_types:"Dict",info:"[Only used by IsothermMultiTempWorkChain] Already computed geometric properties"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"parameters",required:!1,valid_types:"Dict",info:"Parameters for Isotherm work chain"},{name:"pe_parameters",required:!1,valid_types:"Dict",info:"Parameters for PE process modelling"}],outputs:[{name:"co2",required:!0,valid_types:"Data",info:""},{name:"n2",required:!0,valid_types:"Data",info:""},{name:"output_parameters",required:!0,valid_types:"Dict",info:"Output parmaters of a calc_PE calculations"}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_lsmo.workchains:IsothermCalcPEWorkChain"},"lsmo.zeopp_multistage_ddec":{description:["A workchain that combines: Zeopp + Cp2kMultistageWorkChain + Cp2kDdecWorkChain + 
Zeopp"],spec:{inputs:[{name:"cp2k_base",required:!0,valid_types:"",info:""},{name:"ddec",required:!0,valid_types:"",info:""},{name:"structure",required:!0,valid_types:"CifData",info:"input structure"},{name:"zeopp",required:!0,valid_types:"",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"min_cell_size",required:!1,valid_types:"Float",info:"To avoid using k-points, extend the cell so that min(perp_width)>min_cell_size"},{name:"parent_calc_folder",required:!1,valid_types:"RemoteData",info:"Provide an initial parent folder that contains the wavefunction for restart"},{name:"protocol_modify",required:!1,valid_types:"Dict",info:"Specify custom settings that overvrite the yaml settings"},{name:"protocol_tag",required:!1,valid_types:"Str",info:"The tag of the protocol to be read from {tag}.yaml unless protocol_yaml input is specified"},{name:"protocol_yaml",required:!1,valid_types:"SinglefileData",info:"Specify a custom yaml file with the multistage settings (and ignore protocol_tag)"},{name:"starting_settings_idx",required:!1,valid_types:"Int",info:"If idx>0 is chosen, jumps directly to overwrite settings_0 with settings_{idx}"}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"structure_ddec",required:!0,valid_types:"CifData",info:"structure with DDEC charges"},{name:"zeopp_after_opt",required:!0,valid_types:"Data",info:""},{name:"zeopp_before_opt",required:!0,valid_types:"Data",info:""},{name:"last_input_parameters",required:!1,valid_types:"Dict",info:"CP2K input parameters used (and possibly working) used in the last stage"},{name:"output_parameters",required:!1,valid_types:"Dict",info:"Output CP2K parameters of all the stages, merged together"}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_lsmo.workchains:ZeoppMultistageDdecWorkChain"},"lsmo.sim_annealing":{description:["A work chain to compute the minimum energy geometry of a molecule inside a framework, using simulated annealing,"," i.e., decreasing the temperature of a Monte Carlo simulation and finally running and energy minimization step."],spec:{inputs:[{name:"molecule",required:!0,valid_types:"Str, Dict",info:"Adsorbate molecule: settings to be read from the yaml.Advanced: input a Dict for non-standard settings."},{name:"parameters",required:!0,valid_types:"Dict",info:"Parameters for the SimAnnealing workchain: will be merged with default ones."},{name:"raspa_base",required:!0,valid_types:"",info:""},{name:"structure",required:!0,valid_types:"CifData",info:"Adsorbent framework CIF."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"loaded_molecule",required:!0,valid_types:"CifData",info:"CIF containing the final postition of the molecule."},{name:"loaded_structure",required:!0,valid_types:"CifData",info:"CIF containing the loaded structure."},{name:"output_parameters",required:!1,valid_types:"Dict",info:"Information about the final configuration."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required 
output."}]},class:"aiida_lsmo.workchains.sim_annealing:SimAnnealingWorkChain"},"lsmo.nanoporous_screening_1":{description:["A workchain that combines: ZeoppMultistageDdecWorkChain wc1 and IsothermCalcPEWorkChain wc2."," In future I will use this to include more applications to run in parallel."],spec:{inputs:[{name:"cp2k_base",required:!0,valid_types:"",info:""},{name:"ddec",required:!0,valid_types:"",info:""},{name:"raspa_base",required:!0,valid_types:"",info:""},{name:"structure",required:!0,valid_types:"CifData",info:"input structure"},{name:"zeopp",required:!0,valid_types:"",info:""},{name:"geometric",required:!1,valid_types:"Dict",info:"[Only used by IsothermMultiTempWorkChain] Already computed geometric properties"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"min_cell_size",required:!1,valid_types:"Float",info:"To avoid using k-points, extend the cell so that min(perp_width)>min_cell_size"},{name:"parameters",required:!1,valid_types:"Dict",info:"Parameters for Isotherm work chain"},{name:"parent_calc_folder",required:!1,valid_types:"RemoteData",info:"Provide an initial parent folder that contains the wavefunction for restart"},{name:"pe_parameters",required:!1,valid_types:"Dict",info:"Parameters for PE process modelling"},{name:"protocol_modify",required:!1,valid_types:"Dict",info:"Specify custom settings that overvrite the yaml settings"},{name:"protocol_tag",required:!1,valid_types:"Str",info:"The tag of the protocol to be read from {tag}.yaml unless protocol_yaml input is specified"},{name:"protocol_yaml",required:!1,valid_types:"SinglefileData",info:"Specify a custom yaml file with the multistage settings (and ignore protocol_tag)"},{name:"starting_settings_idx",required:!1,valid_types:"Int",info:"If idx>0 is chosen, jumps directly to overwrite settings_0 with settings_{idx}"}],outputs:[],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_lsmo.workchains:NanoporousScreening1WorkChain"}}},commits_count:17,summaryinfo:[{colorclass:"blue",text:"Calculations",count:5},{colorclass:"brown",text:"Parsers",count:2},{colorclass:"green",text:"Workflows",count:10}],pip_install_cmd:"pip install git+https://github.com/lsmo-epfl/aiida-lsmo",is_installable:"True"},"aiida-metavo-scheduler":{code_home:"https://github.com/pzarabadip/aiida-metavo-scheduler",development_status:"stable",entry_point_prefix:"metavo_scheduler",pip_url:"git+https://github.com/pzarabadip/aiida-metavo-scheduler",name:"aiida-metavo-scheduler",package_name:"aiida_metavo_scheduler",hosted_on:"github.com",metadata:{author:"Pezhman Zarabadi-Poor",author_email:"pzarabadip@gmail.com",version:"1.0.0",description:"",classifiers:["Framework :: AiiDA","License :: OSI Approved :: MIT License","Operating System :: POSIX :: Linux","Programming Language :: Python","Programming Language :: Python :: 3.6","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Topic :: 
Scientific/Engineering"]},aiida_version:">=1.0.0,<1.6",entry_points:{"aiida.cmdline.computer.configure":{sshmetavo:"aiida_metavo_scheduler.metavo.ssh_metavo:CONFIGURE_SSH_CMD"},"aiida.schedulers":{pbsprometavo:"aiida_metavo_scheduler.metavo.pbspro_metavo:PbsproSchedulerMetaVO"},"aiida.transports":{sshmetavo:"aiida_metavo_scheduler.metavo.ssh_metavo:SshTransport"}},commits_count:0,summaryinfo:[{colorclass:"orange",text:"Other (Cmdline computer configure, Schedulers, Transports)",count:3}],pip_install_cmd:"pip install git+https://github.com/pzarabadip/aiida-metavo-scheduler",is_installable:"False"},"aiida-mpds":{code_home:"https://github.com/mpds-io/mpds-aiida",development_status:"beta",documentation_url:"https://github.com/mpds-io/mpds-aiida",entry_point_prefix:"mpds",pip_url:"git+https://github.com/mpds-io/mpds-aiida",name:"aiida-mpds",package_name:"aiida_mpds",hosted_on:"github.com",metadata:{author:"Andrey Sobolev",author_email:"as@tilde.pro",version:"",description:"Aiida workflows for MPDS based on CRYSTAL",classifiers:["Programming Language :: Python","Programming Language :: Python :: 3.5","Programming Language :: Python :: 3.6","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering","Topic :: Scientific/Engineering :: Chemistry","Topic :: Scientific/Engineering :: Physics","Topic :: Scientific/Engineering :: Information Analysis","Framework :: AiiDA"]},aiida_version:">=1.0.1",entry_points:{"aiida.workflows":{"crystal.mpds":"mpds_aiida.workflows.mpds:MPDSStructureWorkchain","crystal.cif":"mpds_aiida.workflows.cif:CIFStructureWorkchain","crystal.aiida":"mpds_aiida.workflows.aiida:AiidaStructureWorkchain"}},commits_count:9,summaryinfo:[{colorclass:"green",text:"Workflows",count:3}],pip_install_cmd:"pip install git+https://github.com/mpds-io/mpds-aiida",is_installable:"False"},"aiida-muon":{entry_point_prefix:"muon",code_home:"https://github.com/positivemuon/aiida-muon",version_file:"https://github.com/positivemuon/aiida-muon/blob/main/aiida_muon/__init__.py",pip_url:"git+https://github.com/positivemuon/aiida-muon",name:"aiida-muon",package_name:"aiida_muon",hosted_on:"github.com",metadata:{description:"aiida-muon is allows to find candiate muon implantation sites and hyperfine field by DFT supercell relaxations and from further symmetry and kinetics analysis. 
",classifiers:["Programming Language :: Python","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Development Status :: 2 - Pre-Alpha","Framework :: AiiDA"],author:"Muon group Parma"},aiida_version:">=2.0,<3",entry_points:{"aiida.workflows":{"muon.find_muon":{description:["FindMuonWorkChain finds the candidate implantation site for a positive muon."," It first performs DFT relaxation calculations for a set of initial muon sites."," It then analyzes the results of these calculations and finds candidate muon sites."," If there are magnetic inequivalent sites not initially, they are recalculated"," It further calculates the muon contact hyperfine field at these candidate sites."],spec:{inputs:[{name:"sc_matrix",required:!0,valid_types:"List",info:" List of length 1 for supercell size "},{name:"structure",required:!0,valid_types:"StructureData",info:"Input initial structure"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"mu_spacing",required:!1,valid_types:"Float, NoneType",info:"Minimum distance in Angstrom between two starting muon positions generated on a grid."},{name:"qe",required:!1,valid_types:"",info:"Input parameters, settings and options for QE DFT calculations"}],outputs:[{name:"all_index_uuid",required:!0,valid_types:"Dict",info:""},{name:"all_sites",required:!0,valid_types:"Dict",info:""},{name:"unique_sites",required:!0,valid_types:"Dict",info:""},{name:"unique_sites_dipolar",required:!1,valid_types:"List",info:""},{name:"unique_sites_hyperfine",required:!1,valid_types:"Dict",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:405,message:"One of the PwRelaxWorkChain subprocesses failed"},{status:406,message:"One of the PwBaseWorkChain subprocesses failed"},{status:407,message:"One of the PPWorkChain subprocesses failed"}]},class:"aiida_muon.workflows.find_muon:FindMuonWorkChain"}}},commits_count:21,development_status:"pre-alpha",summaryinfo:[{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install git+https://github.com/positivemuon/aiida-muon",is_installable:"True"},"aiida-musconv":{entry_point_prefix:"musconv",code_home:"https://github.com/positivemuon/aiida-musconv",version_file:"https://github.com/positivemuon/aiida-musconv/blob/main/aiida_musconv/__init__.py",pip_url:"git+https://github.com/positivemuon/aiida-musconv",name:"aiida-musconv",package_name:"aiida_musconv",hosted_on:"github.com",metadata:{description:"aiida-musconv is a plugin that allows to obtain converged supercell size for an interstitial impurity calculation.",classifiers:["Programming Language :: Python","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Development Status :: 2 - Pre-Alpha","Framework :: AiiDA"],author:"Muon group Parma"},aiida_version:">=2.0,<3",entry_points:{"aiida.workflows":{musconv:{description:["WorkChain for finding converged supercell for interstitial impurity calculation"],spec:{inputs:[{name:"pwscf",required:!0,valid_types:"Data",info:""},{name:"structure",required:!0,valid_types:"StructureData",info:"Input initial structure"},{name:"kpoints_distance",required:!1,valid_types:"Float, NoneType",info:"The minimum desired distance in 1/Å between k-points in reciprocal 
space."},{name:"max_iter_num",required:!1,valid_types:"Int, NoneType",info:"Maximum number of iteration in the supercell convergence loop"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"min_length",required:!1,valid_types:"Float, NoneType",info:"The minimum length of the smallest lattice vector for the first generated supercell "},{name:"pseudofamily",required:!1,valid_types:"Str, NoneType",info:"The label of the pseudo family"}],outputs:[{name:"Converged_SCmatrix",required:!0,valid_types:"ArrayData",info:""},{name:"Converged_supercell",required:!0,valid_types:"StructureData",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:402,message:"one of the PwCalculation subprocesses failed"},{status:702,message:"Max number of supercell convergence reached "},{status:704,message:"Error in fitting the forces to an exponential"}]},class:"aiida_musconv.workflows.musconv:MusconvWorkChain"}}},commits_count:43,development_status:"pre-alpha",summaryinfo:[{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install git+https://github.com/positivemuon/aiida-musconv",is_installable:"True"},"aiida-nanotech-empa":{code_home:"https://github.com/nanotech-empa/aiida-nanotech-empa",development_status:"beta",entry_point_prefix:"nanotech_empa",pip_url:"git+https://github.com/nanotech-empa/aiida-nanotech-empa",name:"aiida-nanotech-empa",package_name:"aiida_nanotech_empa",hosted_on:"github.com",metadata:{},aiida_version:null,entry_points:{},commits_count:30,summaryinfo:[],pip_install_cmd:"pip install git+https://github.com/nanotech-empa/aiida-nanotech-empa",is_installable:"False"},"aiida-nims-scheduler":{code_home:"https://github.com/atztogo/aiida-nims-scheduler",development_status:"stable",documentation_url:"https://github.com/atztogo/aiida-nims-scheduler",entry_point_prefix:"nims_scheduler",pip_url:"git+https://github.com/atztogo/aiida-nims-scheduler",name:"aiida-nims-scheduler",package_name:"aiida_nims_scheduler",hosted_on:"github.com",metadata:{},aiida_version:null,entry_points:{},commits_count:21,summaryinfo:[],pip_install_cmd:"pip install git+https://github.com/atztogo/aiida-nims-scheduler",is_installable:"True"},"aiida-nwchem":{code_home:"https://github.com/aiidateam/aiida-nwchem",documentation_url:"https://aiida-nwchem.readthedocs.io/",entry_point_prefix:"nwchem",pip_url:"aiida-nwchem",plugin_info:"https://raw.githubusercontent.com/aiidateam/aiida-nwchem/master/setup.json",name:"aiida-nwchem",package_name:"aiida_nwchem",hosted_on:"github.com",metadata:{description:"The official AiiDA plugin for NWChem",author_email:"The AiiDA team ",classifiers:["Development Status :: 4 - Beta","Framework :: AiiDA","License :: OSI Approved :: MIT License","Operating System :: MacOS :: MacOS X","Operating System :: POSIX :: Linux","Programming Language :: Python","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering"],version:"3.0.0"},aiida_version:">=2.0,<3.0",entry_points:{"aiida.calculations":{"nwchem.base":{description:["Base calculation class for NWChem."],spec:{inputs:[{name:"input_file",required:!0,valid_types:"SinglefileData",info:"NWChem input file"},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. 
This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"restart_folder",required:!1,valid_types:"RemoteData, FolderData, NoneType",info:"Remote directory of a completed NWChem calculation to restart from."}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"output_structure",required:!1,valid_types:"StructureData",info:"The relaxed output structure."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Required output files are missing."},{status:301,message:"The retrieved temporary folder could not be accessed."},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:310,message:"The stdout output file could not be read."},{status:312,message:"The stdout output file was incomplete."},{status:313,message:"The stdout contains multiple calculations"},{status:340,message:"The calculation stopped prematurely because it ran out of walltime but the job was killed by the scheduler before the files were safely written to disk for a potential restart."},{status:350,message:"The parser raised an unexpected exception."}]},class:"aiida_nwchem.calculations.nwchem:NwchemBaseCalculation"},"nwchem.nwchem":{description:["Base calculation class for NWChem.",""," Synthesizes NWChem input file from parameter dictionary and StructureData."],spec:{inputs:[{name:"parameters",required:!0,valid_types:"Dict",info:"Input parameters"},{name:"structure",required:!0,valid_types:"StructureData",info:"The input structure, with 
or without a cell"},{name:"add_cell",required:!1,valid_types:"Bool",info:"The input structure, with or without a cell"},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"restart_folder",required:!1,valid_types:"RemoteData, FolderData, NoneType",info:"Remote directory of a completed NWChem calculation to restart from."}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"output_structure",required:!1,valid_types:"StructureData",info:"The relaxed output structure."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Required output files are missing."},{status:301,message:"The retrieved temporary folder could not be accessed."},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:310,message:"The stdout output file could not be read."},{status:312,message:"The stdout output file was incomplete."},{status:313,message:"The stdout contains multiple calculations"},{status:340,message:"The calculation stopped prematurely because it ran out of walltime but the job was killed by the scheduler before the files were safely written to disk for a potential restart."},{status:350,message:"The parser raised an unexpected 
exception."}]},class:"aiida_nwchem.calculations.nwchem:NwchemCalculation"}},"aiida.parsers":{"nwchem.nwchem":"aiida_nwchem.parsers.nwchem:NwchemBaseParser"},"aiida.workflows":{"nwchem.base":{description:["Workchain to run an NWChem calculation with automated error handling and restarts."],spec:{inputs:[{name:"nwchem",required:!0,valid_types:"Data",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"handler_overrides",required:!1,valid_types:"Dict, NoneType",info:"Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration."},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"output_structure",required:!1,valid_types:"StructureData",info:"The relaxed output structure."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:401,message:"The maximum number of iterations was exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."}]},class:"aiida_nwchem.workflows.base:NwchemBaseWorkChain"}}},commits_count:20,development_status:"beta",summaryinfo:[{colorclass:"blue",text:"Calculations",count:2},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install aiida-nwchem",is_installable:"True"},"aiida-open_circuit_voltage":{entry_point_prefix:"quantumespresso.ocv",code_home:"https://github.com/tsthakur/aiida-open_circuit_voltage",name:"aiida-open_circuit_voltage",package_name:"aiida_open_circuit_voltage",hosted_on:"github.com",metadata:{author:"Tushar Thakur",author_email:"tushar.thakur@epfl.ch",version:"0.1.0",description:"The AiiDA plugin to calculate ocv at various charge of states using QE",classifiers:["Framework :: AiiDA","License :: OSI Approved :: MIT License","Programming Language :: Python","Development Status :: 3 - Alpha","Natural Language :: English","Intended Audience :: Science/Research"]},aiida_version:">=1.1.0,<2.0.0",entry_points:{"aiida.workflows":{"quantumespresso.ocv.ocvwc":"aiida_open_circuit_voltage.workflows.workchain:OCVWorkChain"}},commits_count:27,development_status:"alpha",summaryinfo:[{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"See 
source code repository."},"aiida-optimize":{code_home:"https://github.com/greschd/aiida-optimize",documentation_url:"https://aiida-optimize.readthedocs.io",entry_point_prefix:"optimize",pip_url:"aiida-optimize",plugin_info:"https://raw.githubusercontent.com/greschd/aiida-optimize/master/setup.json",name:"aiida-optimize",package_name:"aiida_optimize",hosted_on:"github.com",metadata:{description:"AiiDA Plugin for running optimization algorithms.",author:"Dominik Gresch",author_email:"greschd@gmx.ch",license:"Apache 2.0",home_page:"https://aiida-optimize.readthedocs.io/",classifiers:["Development Status :: 5 - Production/Stable","Environment :: Plugins","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: Apache Software License","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering :: Physics"],version:"1.0.2"},aiida_version:">=2.0.0,<3.0.0",entry_points:{"aiida.workflows":{"optimize.optimize":{description:["Runs an optimization procedure, given an optimization engine that defines the optimization"," algorithm, and a process which evaluates the function to be optimized."],spec:{inputs:[{name:"engine",required:!0,valid_types:"Str",info:"Engine that runs the optimization."},{name:"engine_kwargs",required:!0,valid_types:"Dict",info:"Keyword arguments passed to the optimization engine."},{name:"evaluate_process",required:!0,valid_types:"Str",info:"Process which produces the result to be optimized."},{name:"evaluate",required:!1,valid_types:"",info:"Inputs that are passed to all evaluation processes."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"optimal_process_output",required:!0,valid_types:"",info:"Output value of the optimal evaluation process."},{name:"optimal_process_uuid",required:!0,valid_types:"",info:"UUID of the optimal evaluation process."},{name:"engine_outputs",required:!1,valid_types:"",info:""},{name:"optimal_process_input",required:!1,valid_types:"",info:"Input value of the optimal evaluation process."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:201,message:"Optimization failed because one of the evaluate processes did not finish ok."},{status:202,message:"Optimization failed because the engine did not finish ok."}]},class:"aiida_optimize._optimization_workchain:OptimizationWorkChain"},"optimize.wrappers.add_inputs":{description:["Wrapper workchain that takes inputs as keys and values and passes it"," on to a sub-process. This enables taking a process which was not"," designed to be used in optimization, and optimize with respect to"," some arbitrary input. Inputs which always remain the same can be"," specified in the ``inputs`` namespace, whereas the inputs to be"," optimized are given through the ``added_input_keys`` and"," ``added_input_values`` inputs.",""," The outputs of the wrapper workchain are the same as those of"," the wrapped process.","",' The "added" inputs can only be BaseType sub-classes, or'," attributes of a Dict. For each input, its port location is given",' in the "added_input_keys" input. 
For example, ``x.y`` would set'," the ``y`` input in the ``x`` namespace.",""," For cases where the input is a Dict attribute, the (possibly nested) attribute name is given after a colon. That means ``x:a.b`` would"," set the ``['a']['b']`` attribute of the ``Dict`` given in the ``x``"," input.",""," In cases where only a single input needs to be added, they can be"," specified directly instead of wrapped in a List."],spec:{inputs:[{name:"added_input_keys",required:!0,valid_types:"List, Str",info:"Specifies the location of each added input."},{name:"added_input_values",required:!0,valid_types:"List, BaseType",info:"Values of the added inputs to be passed into the sub-process."},{name:"sub_process",required:!0,valid_types:"Str",info:"The class of the process that should be wrapped."},{name:"inputs",required:!1,valid_types:"",info:"Inputs to be passed on to the sub-process."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:201,message:"Workchain failed because the sub-process did not finish ok."}]},class:"aiida_optimize.wrappers._add_inputs:AddInputsWorkChain"},"optimize.wrappers.concatenate":{description:["Allows concatenating an arbitrary number of sub-processes.",""," A wrapper workchain that allows concatenating an arbitrary number"," of sub-processes. Outputs of one processes can be configured to"," be passed to the next one."],spec:{inputs:[{name:"output_input_mappings",required:!0,valid_types:"List",info:"Defines how inputs are passed between sub-processes. Each list entry entry has the form `((process_label_a, process_label_b), mapping)`, and defines outputs of process A to be passed to process B. The `mapping` values are dictionaries `{'output_name': 'input_name'}` giving the output name (in process A) and input name (in process B) for each value to pass."},{name:"process_inputs",required:!0,valid_types:"",info:"Inputs which are passed on to the sub-processes. The inputs should be grouped into a namespace identified by the process label."},{name:"process_labels",required:!0,valid_types:"List",info:"A list of pairs (label, process_name). 
The labels can be any string, the process_name needs to be loadable by `aiida_optimize.process_inputs.load_object`, and defines which process is being run."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"process_outputs",required:!0,valid_types:"",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:200,message:"Workchain failed because a sub-process failed."}]},class:"aiida_optimize.wrappers._concatenate:ConcatenateWorkChain"},"optimize.wrappers.create_evaluate":{description:["Wrapper workchain to combine two processes: The first process _creates_"," a result, and the second _evaluates_ that result.",""," The purpose of this workchain is to facilitate optimization of processes"," which don't natively produce an output that can be optimized, by only"," having to add the 'evaluation' part."],spec:{inputs:[{name:"create",required:!0,valid_types:"",info:"Inputs which are passed on to the create sub-process."},{name:"create_process",required:!0,valid_types:"Str",info:"The sub-process which performs the create step."},{name:"evaluate_process",required:!0,valid_types:"Str",info:"The sub-process which performs the evaluate step."},{name:"output_input_mapping",required:!0,valid_types:"Dict",info:"A mapping from output names of the create process to input names of the evaluate process. These outputs (if present) are forwarded to the evaluate process."},{name:"evaluate",required:!1,valid_types:"",info:"Inputs which are passed on to the evaluate sub-process."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"create",required:!0,valid_types:"",info:""},{name:"evaluate",required:!0,valid_types:"",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:201,message:"Workchain failed because the 'create' sub-process failed."},{status:202,message:"Workchain failed because the 'evaluate' sub-process failed."}]},class:"aiida_optimize.wrappers._create_evaluate:CreateEvaluateWorkChain"}}},commits_count:2,development_status:"stable",summaryinfo:[{colorclass:"green",text:"Workflows",count:4}],pip_install_cmd:"pip install aiida-optimize",is_installable:"True"},"aiida-orca":{code_home:"https://github.com/pzarabadip/aiida-orca",development_status:"stable",documentation_url:"https://aiida-orca.readthedocs.io/",entry_point_prefix:"orca",pip_url:"git+https://github.com/pzarabadip/aiida-orca",name:"aiida-orca",package_name:"aiida_orca",hosted_on:"github.com",metadata:{author:"Pezhman Zarabadi-Poor",author_email:"pzarabadip@gmail.com",version:"0.5.1",description:"AiiDA plugin for ORCA code",classifiers:["Environment :: Plugins","Programming Language :: Python :: 3.6","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Framework :: 
AiiDA"]},aiida_version:">=1.0.0,<2.0.0",entry_points:{"aiida.calculations":{orca_main:"aiida_orca.calculations:OrcaCalculation",orca_asa:"aiida_orca.calculations:OrcaAsaCalculation"},"aiida.parsers":{orca_base_parser:"aiida_orca.parsers:OrcaBaseParser"},"aiida.workflows":{"orca.base":{description:["Workchain to run a orca calculation with automated error handling and restarts."],spec:{inputs:[{name:"orca",required:!0,valid_types:"Data",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"handler_overrides",required:!1,valid_types:"Dict, NoneType",info:"Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration."},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"the results of the calculation"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"relaxed_structure",required:!1,valid_types:"StructureData",info:"relaxed structure"},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:300,message:"The calculation failed with an unidentified unrecoverable error."},{status:301,message:"The sub process excepted."},{status:301,message:"The calculation failed with an unrecoverable error coming from aiida-orca."},{status:302,message:"The sub process was killed."},{status:401,message:"The maximum number of iterations was exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."}]},class:"aiida_orca.workchains:OrcaBaseWorkChain"}}},commits_count:42,summaryinfo:[{colorclass:"blue",text:"Calculations",count:2},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install git+https://github.com/pzarabadip/aiida-orca",is_installable:"True"},"aiida-phonopy":{code_home:"https://github.com/aiida-phonopy/aiida-phonopy",documentation_url:"https://aiida-phonopy.readthedocs.io/",entry_point_prefix:"phonopy",pip_url:"aiida-phonopy",plugin_info:"https://raw.githubusercontent.com/aiida-phonopy/aiida-phonopy/master/setup.json",name:"aiida-phonopy",package_name:"aiida_phonopy",hosted_on:"github.com",metadata:{description:"The official AiiDA plugin for Phonopy",author_email:"Lorenzo Bastonero ",classifiers:["Development Status :: 5 - Production/Stable","Framework :: AiiDA","License :: OSI Approved :: MIT License","Operating System :: MacOS :: MacOS 
X","Operating System :: POSIX :: Linux","Programming Language :: Python","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering :: Chemistry","Topic :: Scientific/Engineering :: Physics"],version:"1.1.3"},aiida_version:">=2.0.0,<3.0.0",entry_points:{"aiida.calculations":{"phonopy.phonopy":{description:["Base `CalcJob` implementation for Phonopy post-processing."],spec:{inputs:[{name:"parameters",required:!0,valid_types:"Dict",info:`Phonopy parameters (\`setting tags\`) for post processing. The following tags, along their type, are allowed: +PRIMITIVE_AXES +PRIMITIVE_AXIS +EIGENVECTORS +BAND +BAND_PATHS +BAND_POINTS +BAND_LABELS +BAND_CONNECTION +BAND_INDICES +MESH +MP +MESH_NUMBERS +MP_SHIFT +GAMMA_CENTER +WRITE_MESH +DOS +DOS_RANGE +FMIN +FMAX +FPITCH +PDOS +PROJECTION_DIRECTION +XYZ_DIRECTION +SIGMA +DEBYE_MODEL +MOMEMT +MOMENT_ORDER +TPROP +TMIN +TMAX +TSTEP +PRETEND_REAL +CUTOFF_FREQUENCY +TDISP +TDISPMAT +TDISPMAT_CIF +QPOINTS +WRITEDM +NAC_METHOD +Q_DIRECTION +GROUP_VELOCITY +GV_DELTA_Q +SYMMETRY_TOLERANCE +SYMMETRY +MESH_SYMMETRY +FC_SYMMETRY +FULL_FORCE_CONSTANTS +WRITE_FORCE_CONSTANTS +ANIME_TYPE +ANIME +MODULATION +IRREPS +SHOW_IRREPS +LITTLE_COGROUP`},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"force_constants",required:!1,valid_types:"ForceConstantsData, NoneType",info:"Force constants of the input structure."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"phonopy_data",required:!1,valid_types:"PhonopyData, NoneType",info:"The preprocess output info of a previous ForceConstantsWorkChain."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Settings for phonopy calculation."}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"irreducible_representations",required:!1,valid_types:"Dict",info:"Irreducible representation output."},{name:"modulation",required:!1,valid_types:"Dict",info:"Modulation information."},{name:"output_force_constants",required:!1,valid_types:"ArrayData",info:"Calculated force constants."},{name:"output_parameters",required:!1,valid_types:"Dict",info:"Sum up info of phonopy calculation."},{name:"phonon_bands",required:!1,valid_types:"BandsData",info:"Calculated phonon band structure."},{name:"projected_phonon_dos",required:!1,valid_types:"XyData",info:"Calculated projected DOS."},{name:"qpoints",required:!1,valid_types:"BandsData",info:"Calculated qpoints."},{name:"qpoints_mesh",required:!1,valid_types:"BandsData",info:"Calculated qpoint mesh."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"thermal_displacement_matrices",required:!1,valid_types:"Dict",info:"Calculated thermal displacements matrices."},{name:"thermal_displacements",required:!1,valid_types:"Dict",info:"Calculated thermal displacements."},{name:"thermal_properties",required:!1,valid_types:"XyData",info:"Calculated thermal properties."},{name:"total_phonon_dos",required:!1,valid_types:"XyData",info:"Calculated total DOS."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:301,message:"The retrieved temporary folder could not be accessed."},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:303,message:"The retrieved folder did not contain the required phonopy file."},{status:304,message:"The retrieved folder did not contain one or more expected output files."},{status:305,message:"No run mode has been selected."},{status:310,message:"The stdout output file could not be read."},{status:311,message:"The stdout output file could not be parsed."},{status:312,message:"The stdout output file was incomplete probably because the calculation got interrupted."},{status:320,message:"The loading of yaml file got an unexpected error."},{status:321,message:"The file loading via numpy got an unexpected error."},{status:350,message:"The parser raised an unexpected exception."},{status:400,message:"The parser was not able to parse one or more files."}]},class:"aiida_phonopy.calculations.phonopy:PhonopyCalculation"}},"aiida.data":{"phonopy.force_constants":"aiida_phonopy.data.force_constants:ForceConstantsData","phonopy.phonopy":"aiida_phonopy.data.phonopy:PhonopyData","phonopy.preprocess":"aiida_phonopy.data.preprocess:PreProcessData","phonopy.raw":"aiida_phonopy.data.raw:RawData"},"aiida.parsers":{"phonopy.phonopy":"aiida_phonopy.parsers.phonopy:PhonopyParser"},"aiida.workflows":{"phonopy.phonopy":{description:["Abstract workflow for automated frozen phonons.",""," 
Phonopy is used to produce structures with displacements,"," while the forces are calculated with a quantum engine of choice.",""," This workchain is meant to be used as a base for other specific force calculator plugin workchains,"," or as an example of how to set up a possible workchain/workflow. For this reason, the outline of"," this class is not defined, while it provides the inputs and a `setup` method, which can be used"," in a specific workflow outline. Ideally, the workflow would look like:",""," 1. Setup the preprocess data.",""," This is already provided in this class. It sets up a `PreProcessData` node, from where"," supercell, primitive cell and supercells with displacements can be easily extracted using"," the methods of the nodes. This node can be taken from `self.ctx.preprocess_data`, and used"," during the outline of the workflow.",""," 2. Run supercells using the selected quantum engine/force calculator code.",""," In specific code implementations, a force calculation on supercells needs to be run."," To get these supercells, one simply needs to run:",""," ```self.ctx.preprocess_data.calcfunctions.get_supercells_with_displacements()```",""," This will return a dictionary with all the supercells as StructureData to run for the phonon calculation."," The keys of this dictionary are of the type `supercell_{number}`, where `number` is an integer."," These numbers are essential since the `phonopy` force sets are generated following these numbers,"," in order to make sure to refer to the correct displacement. Thus, it is required to keep track"," of them."," Moreover, a calculation over the pristine supercell structure should be run beforehand as a reference."," This structure can instead be gotten via:",""," ```self.ctx.preprocess_data.calcfunctions.get_supercell()```",""," This will return a StructureData without any label.",""," For an example of implementation, refer to aiidateam/aiida-common-workflows.",""," * Note: some type of force calculation needs to map some variables from the unitcell to the supercell"," (and in certain cases even the primitive cell), e.g. the atomic spin in VASP. Since this is code dependent,"," you will need to map these parameters before launching the force calculation of a certain supercell"," with displacement. This information can be gotten via:",""," ```self.ctx.preprocess_data.get_cells_mappings()```",""," Moreover, consider that cells in phonopy will always (re)fold the atoms in order to have positive coordinates.",""," 3. Inspect all runs and expose the forces and energies (not mandatory) outputs.",""," * Suggested: when the calculation on each supercell has finished (correctly)"," expose the output forces (and energies) in the dynamical `supercells_forces(energies)` namespace(s)."," Provide each supercell's forces as an `ArrayData` with the forces stored as `forces`"," (e.g. if your code plugin stores the forces in `TrajectoryData`, extract them with a `calcfunction`)."," Expose each `ArrayData` choosing a **common prefix**, while as **suffix use"," _{number}**, with `{number}` referring to the corresponding supercell label suffix (that you are supposed to"," keep track of somewhere, e.g. in the label of the code calculation/workchain)."," Now you can gather all the information in one data node, i.e. 
in a `PhonopyData` node."," To do so, you can simply run:",""," ```self.ctx.preprocess_data.calcfunctions.generate_phonopy_data(**self.outputs.supercells_forces)```",""," and then expose it as output in the `output_phonopy_data` namespace.",""," * Alternatively: instead of exposing the supercell forces as outputs, you can directly gather all the forces"," in a dictionary and pass this dictionary directly to the `generate_phonopy_data` method (always using"," the double *).",""," See the implementation in aiidateam/aiida-common-workflows for an example.",""," 4. (optional) Run the non-analytical constants on the primitive cell.",""," Non-analytical constants should be run for polar insulators. These usually require a linear response code"," or a finite difference approach (e.g. using the electric enthalpy). Since this is usually the most expensive"," part, you should run them on the primitive cell. To get it, use:",""," ```self.ctx.preprocess_data.calcfunctions.get_primitive_cell()```",""," If you also compute these, collect the dielectric tensor and the effective Born charges in an ArrayData,"," with the arraynames `dielectric` and `born_charges` (in Cartesian coordinates!)."," Then, gather all the information of nac and forces in a single `PhonopyData` via:",""," ```"," self.ctx.preprocess_data.calcfunctions.generate_phonopy_data("," nac_parameters=nac_parameters,"," **self.outputs.supercells_forces"," )"," ```",""," and expose the output.",""," * Note: we require, in the input for generating the full phonopy data, the nac to be given in the primitive cell."," The primitive cell of phonopy will just rotate the lattice vectors, thus maintaining the Cartesian coordinate"," system. It can happen, though, that the unitcell is not the primitive cell of the system, meaning that the"," primitive cell will contain fewer atoms. We expect as input the nac computed on this number of atoms. If you"," want, for some reason, to compute the nac on the unitcell, you will need to get the reduced nac."," To do so, you can consider using a built-in function in phonopy, namely:",""," :py:func:`phonopy.structure.symmetry.elaborate_borns_and_epsilon`"],spec:{inputs:[{name:"options",required:!0,valid_types:"",info:"Options for how to run the workflow."},{name:"displacement_generator",required:!1,valid_types:"Dict, NoneType",info:`Info for displacements generation. The following flags are allowed: + distance + is_plusminus + is_diagonal + is_trigonal + number_of_snapshots + random_seed + temperature + cutoff_frequency`},{name:"fc_options",required:!1,valid_types:"Dict, NoneType",info:`Options for force constants calculation (optional). The following flags are allowed: + calculate_full_force_constants + fc_calculator + fc_calculator_options`},{name:"is_symmetry",required:!1,valid_types:"Bool, NoneType",info:"Whether or not to use the space group symmetries."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"nac_parameters",required:!1,valid_types:"ArrayData, NoneType",info:"Non-analytical parameters."},{name:"preprocess_data",required:!1,valid_types:"PhonopyData, PreProcessData, NoneType",info:"The preprocess data for frozen phonon calculation."},{name:"primitive_matrix",required:!1,valid_types:"List, NoneType",info:"The matrix used to generate the primitive cell from the input structure in the List format. 
Allowed shapes are 3x1 and 3x3 lists."},{name:"structure",required:!1,valid_types:"StructureData, NoneType",info:"The structure at equilibrium volume."},{name:"supercell_matrix",required:!1,valid_types:"List, NoneType",info:"The matrix used to generate the supercell from the input structure in the List format. Allowed shapes are 3x1 and 3x3 lists."},{name:"symmetry_tolerance",required:!1,valid_types:"Float, NoneType",info:"Symmetry tolerance for space group analysis on the input structure."}],outputs:[{name:"output_phonopy_data",required:!0,valid_types:"PhonopyData",info:"The phonopy data with supercells displacements, forces and (optionally)nac parameters to use in the post-processing calculation."},{name:"supercells_forces",required:!0,valid_types:"ArrayData",info:"The forces acting on the atoms of each supercell."},{name:"output_force_constants",required:!1,valid_types:"ForceConstantsData",info:"The matrix of force constants computed with finite displacements."},{name:"supercells",required:!1,valid_types:"StructureData",info:"The supercells with displacements."},{name:"supercells_energies",required:!1,valid_types:"Float",info:"The total energy of each supercell."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_phonopy.workflows.phonopy:PhonopyWorkChain"}}},commits_count:66,development_status:"stable",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"red",text:"Data",count:4},{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install aiida-phonopy",is_installable:"True"},"aiida-phtools":{code_home:"https://github.com/ltalirz/aiida-phtools",entry_point_prefix:"phtools",pip_url:"aiida-phtools",plugin_info:"https://raw.github.com/ltalirz/aiida-phtools/master/setup.json",name:"aiida-phtools",package_name:"aiida_phtools",hosted_on:"github.com",metadata:{description:"AiiDA plugin for persistence homology tools, used to analyze nanoporous materials.",author:"Leopold Talirz",author_email:"leopold.talirz@gmail.com",license:"MIT",home_page:"https://github.com/ltalirz/aiida-phtools",classifiers:["Programming Language :: Python"],version:"0.1.0a1"},aiida_version:"*",entry_points:{"aiida.calculations":{"phtools.dmatrix":"aiida_phtools.calculations.distance_matrix:DistanceMatrixCalculation","phtools.surface":"aiida_phtools.calculations.pore_surface:PoreSurfaceCalculation"},"aiida.data":{"phtools.surface":"aiida_phtools.data.pore_surface:PoreSurfaceParameters"},"aiida.parsers":{"phtools.dmatrix":"aiida_phtools.parsers.distance_matrix:DistanceMatrixParser","phtools.surface":"aiida_phtools.parsers.pore_surface:PoreSurfaceParser"}},commits_count:0,development_status:"planning",summaryinfo:[{colorclass:"blue",text:"Calculations",count:2},{colorclass:"brown",text:"Parsers",count:2},{colorclass:"red",text:"Data",count:1}],pip_install_cmd:"pip install --pre aiida-phtools"},"aiida-plumed":{code_home:"https://github.com/ConradJohnston/aiida-plumed",entry_point_prefix:"plumed",pip_url:"aiida-plumed",plugin_info:"https://raw.github.com/ConradJohnston/aiida-plumed/AiiDA-v1.0-compatibility/setup.json",name:"aiida-plumed",package_name:"aiida_plumed",hosted_on:"github.com",metadata:{description:"AiiDA plugin providing support for Plumed2",author:"Conrad 
Johnston",author_email:"conrad.s.johnston@googlemail.com",license:"MIT",home_page:"https://github.com/ConradJohnston/aiida-plumed",classifiers:["Development Status :: 2 - Pre-Alpha","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Programming Language :: Python"],version:"0.1.0a0"},aiida_version:">=1.0.0b3,<2.0.0",entry_points:{"aiida.calculations":{plumed:"aiida_plumed.calculations:DiffCalculation"},"aiida.cmdline.data":{plumed:"aiida_plumed.cli:data_cli"},"aiida.data":{plumed:"aiida_plumed.data:DiffParameters"},"aiida.parsers":{plumed:"aiida_plumed.parsers:DiffParser"}},commits_count:0,development_status:"pre-alpha",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"red",text:"Data",count:1},{colorclass:"orange",text:"Other (Data commands)",count:1}],pip_install_cmd:"pip install --pre aiida-plumed",is_installable:"True"},"aiida-porousmaterials":{code_home:"https://github.com/pzarabadip/aiida-porousmaterials",development_status:"stable",entry_point_prefix:"porousmaterials",pip_url:"aiida-porousmaterials",name:"aiida-porousmaterials",package_name:"aiida_porousmaterials",hosted_on:"github.com",metadata:{description:"AiiDA plugin for PorousMaterials code",author:"Pezhman Zarabadi-Poor",author_email:"pzarabadip@gmail.com",license:"MIT",home_page:"https://github.com/pzarabadip/aiida-porousmaterials",classifiers:["Environment :: Plugins","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Programming Language :: Python :: 3.6","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8"],version:"1.0.0a3"},aiida_version:null,entry_points:{"aiida.calculations":{porousmaterials:{description:["This is PorousMaterialsCalculation as the subclass"," of AiiDA CalcJob to prepare input for the PorousMaterials"," suite of Julia codes."," Please refer to : https://github.com/SimonEnsemble/PorousMaterials.jl"],spec:{inputs:[{name:"acc_voronoi_nodes",required:!0,valid_types:"SinglefileData",info:"Accessible Voronoi nodes calculated by Zeo++"},{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"parameters",required:!0,valid_types:"Dict",info:"parameters such as cutoff and mixing rules."},{name:"structure",required:!0,valid_types:"CifData",info:"Framework input file as CIF"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"settings",required:!1,valid_types:"Dict",info:"Additional input parameters"}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"dictionary of calculated Voronoi energies"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"ev_output_file",required:!1,valid_types:"SinglefileData",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The retrieved folder data node could not be accessed."},{status:101,message:"The retrieved folder does not contain an output file."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."}]},class:"aiida_porousmaterials.calculations:PorousMaterialsCalculation"}},"aiida.parsers":{porousmaterials:"aiida_porousmaterials.parser:PorousMaterialsParser"}},commits_count:0,summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1}],pip_install_cmd:"pip install --pre aiida-porousmaterials",is_installable:"True"},"aiida-pseudo":{code_home:"https://github.com/aiidateam/aiida-pseudo",entry_point_prefix:"pseudo",pip_url:"aiida-pseudo",plugin_info:"https://raw.github.com/aiidateam/aiida-pseudo/master/setup.cfg",name:"aiida-pseudo",package_name:"aiida_pseudo",hosted_on:"github.com",metadata:{description:"AiiDA plugin that simplifies working with pseudo potentials.",author_email:'"Sebastiaan P. Huber" ',classifiers:["Development Status :: 5 - Production/Stable","Framework :: AiiDA","License :: OSI Approved :: MIT License","Operating System :: MacOS :: MacOS X","Operating System :: POSIX :: Linux","Programming Language :: Python","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9"],version:"1.1.0"},aiida_version:">=2.1,<3.0",entry_points:{"aiida.data":{pseudo:"aiida_pseudo.data.pseudo.pseudo:PseudoPotentialData","pseudo.jthxml":"aiida_pseudo.data.pseudo.jthxml:JthXmlData","pseudo.psf":"aiida_pseudo.data.pseudo.psf:PsfData","pseudo.psml":"aiida_pseudo.data.pseudo.psml:PsmlData","pseudo.psp8":"aiida_pseudo.data.pseudo.psp8:Psp8Data","pseudo.upf":"aiida_pseudo.data.pseudo.upf:UpfData","pseudo.vps":"aiida_pseudo.data.pseudo.vps:VpsData"},"aiida.groups":{"pseudo.family":"aiida_pseudo.groups.family.pseudo:PseudoPotentialFamily","pseudo.family.cutoffs":"aiida_pseudo.groups.family.cutoffs:CutoffsPseudoPotentialFamily","pseudo.family.pseudo_dojo":"aiida_pseudo.groups.family.pseudo_dojo:PseudoDojoFamily","pseudo.family.sssp":"aiida_pseudo.groups.family.sssp:SsspFamily"},console_scripts:{"aiida-pseudo":"aiida_pseudo.cli:cmd_root"}},commits_count:24,development_status:"stable",summaryinfo:[{colorclass:"red",text:"Data",count:7},{colorclass:"purple",text:"Console scripts",count:1},{colorclass:"orange",text:"Other (Groups)",count:4}],pip_install_cmd:"pip install aiida-pseudo",is_installable:"True"},"aiida-psi4":{code_home:"https://github.com/ltalirz/aiida-psi4/tree/master",development_status:"beta",entry_point_prefix:"psi4",pip_url:"git+https://github.com/ltalirz/aiida-psi4",name:"aiida-psi4",package_name:"aiida_psi4",hosted_on:"github.com",metadata:{author:"Leopold Talirz",author_email:"leopold.talirz@gmail.com",version:"0.1.0a0",description:"AiiDA 
plugin for the Psi4 Quantum Chemistry package.",classifiers:["Programming Language :: Python","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Framework :: AiiDA"]},aiida_version:">=1.6.4,<2.0.0",entry_points:{"aiida.data":{"psi4.atomic_input":"aiida_psi4.data:AtomicInput"},"aiida.calculations":{psi4:{description:["AiiDA calculation plugin wrapping the psi4 executable."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"psiapi",required:!1,valid_types:"Str, SinglefileData",info:"Psi4 input in PsiAPI python format"},{name:"qcschema",required:!1,valid_types:"Dict, AtomicInput",info:"Psi4 input in QCSchema JSON format"}],outputs:[{name:"qcschema",required:!0,valid_types:"Dict",info:"Psi4 output in QCSchema JSON format"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"stdout",required:!0,valid_types:"SinglefileData",info:"Psi4 logfile"},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:100,message:"Calculation did not produce all expected output files."},{status:101,message:"Psi4 reported calculation as unsuccessful."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."}]},class:"aiida_psi4.calculations:Psi4Calculation"}},"aiida.parsers":{psi4:"aiida_psi4.parsers:QCSchemaParser"}},commits_count:0,summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"red",text:"Data",count:1}],pip_install_cmd:"pip install git+https://github.com/ltalirz/aiida-psi4",is_installable:"True"},"aiida-pyscf":{code_home:"https://github.com/microsoft/aiida-pyscf",entry_point_prefix:"pyscf",pip_url:"aiida-pyscf",plugin_info:"https://github.com/microsoft/aiida-pyscf/blob/main/pyproject.toml",name:"aiida-pyscf",package_name:"aiida_pyscf",hosted_on:"github.com",metadata:{description:"AiiDA plugin for the Python-based Simulations of Chemistry Framework (PySCF).",author_email:'"Sebastiaan P. 
Huber" , Adam Grofe ',classifiers:["Development Status :: 3 - Alpha","Framework :: AiiDA","License :: OSI Approved :: MIT License","Operating System :: MacOS :: MacOS X","Operating System :: POSIX :: Linux","Programming Language :: Python","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering"],version:"0.4.0"},aiida_version:">=2.3,<3.0",entry_points:{"aiida.calculations":{"pyscf.base":{description:["``CalcJob`` plugin for PySCF."],spec:{inputs:[{name:"code",required:!0,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"structure",required:!0,valid_types:"StructureData",info:"Input structure with molecular structure definition."},{name:"checkpoint",required:!1,valid_types:"SinglefileData, NoneType",info:"Checkpoint of a previously completed calculation that failed to converge."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Input parameters used to render the PySCF script template."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"cubegen",required:!0,valid_types:"",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"checkpoint",required:!1,valid_types:"SinglefileData",info:"The checkpoint file in case the calculation did not converge. 
Can be used as an input for a restart."},{name:"fcidump",required:!1,valid_types:"SinglefileData",info:"Computed fcidump files."},{name:"parameters",required:!1,valid_types:"Dict",info:"Various computed properties parsed from the `FILENAME_RESULTS` output file."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"structure",required:!1,valid_types:"StructureData",info:"The optimized structure if the input parameters contained the `optimizer` key."},{name:"trajectory",required:!1,valid_types:"TrajectoryData",info:"The geometry optimization trajectory if the input parameters contained the `optimizer` key."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:302,message:"The stdout output file was not retrieved."},{status:303,message:"The results JSON file was not retrieved."},{status:410,message:"The electronic minimization cycle did not reach self-consistency."},{status:500,message:"The ionic minimization cycle did not converge for the given thresholds."}]},class:"aiida_pyscf.calculations.base:PyscfCalculation"}},"aiida.parsers":{"pyscf.base":"aiida_pyscf.parsers.base:PyscfParser"},"aiida.workflows":{"pyscf.base":{description:["Workchain to run a pyscf calculation with automated error handling and restarts."],spec:{inputs:[{name:"pyscf",required:!0,valid_types:"Data",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"handler_overrides",required:!1,valid_types:"Dict, NoneType",info:"Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration."},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"cubegen",required:!0,valid_types:"",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"checkpoint",required:!1,valid_types:"SinglefileData",info:"The checkpoint file in case the calculation did not converge. 
Can be used as an input for a restart."},{name:"fcidump",required:!1,valid_types:"SinglefileData",info:"Computed fcidump files."},{name:"parameters",required:!1,valid_types:"Dict",info:"Various computed properties parsed from the `FILENAME_RESULTS` output file."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"structure",required:!1,valid_types:"StructureData",info:"The optimized structure if the input parameters contained the `optimizer` key."},{name:"trajectory",required:!1,valid_types:"TrajectoryData",info:"The geometry optimization trajectory if the input parameters contained the `optimizer` key."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:300,message:"The calculation failed with an unrecoverable error."},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:310,message:"The calculation failed and did not retrieve a checkpoint file from which can be restarted."},{status:401,message:"The maximum number of iterations was exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."}]},class:"aiida_pyscf.workflows.base:PyscfBaseWorkChain"}}},commits_count:63,development_status:"alpha",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install aiida-pyscf",is_installable:"True"},"aiida-python":{entry_point_prefix:"aiidapython",code_home:"https://github.com/addman2/aiida-python",name:"aiida-python",package_name:"aiida_python",hosted_on:"github.com",metadata:{},aiida_version:null,entry_points:{},commits_count:59,development_status:"planning",summaryinfo:[],pip_install_cmd:"See source code repository."},"aiida-qeq":{code_home:"https://github.com/ltalirz/aiida-qeq",development_status:"stable",entry_point_prefix:"qeq",pip_url:"aiida-qeq",plugin_info:"https://raw.githubusercontent.com/ltalirz/aiida-qeq/master/setup.json",name:"aiida-qeq",package_name:"aiida_qeq",hosted_on:"github.com",metadata:{description:"AiiDA plugin for computing electronic charges on atoms using equilibration-type models (QEq, EQEq, ...).",author:"Leopold Talirz, Daniele Ongari",author_email:"leopold.talirz@gmail.com",license:"MIT",home_page:"https://github.com/ltalirz/aiida-qeq",classifiers:["Programming Language :: Python"],version:"0.1.0"},aiida_version:">=0.12.2,<1.0.0",entry_points:{"aiida.calculations":{"qeq.eqeq":{description:["AiiDA calculation plugin for the EQeq code."],spec:{inputs:[{name:"charge_data",required:!0,valid_types:"SinglefileData",info:"File containing information on common oxidation state of the elements."},{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"ionization_data",required:!0,valid_types:"SinglefileData",info:"File containing ionization data on the elements."},{name:"parameters",required:!0,valid_types:"EQeqParameters",info:"Command line parameters for EQEQ"},{name:"structure",required:!0,valid_types:"CifData",info:"Input structure, for which atomic charges are to be 
computed."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."}]},class:"aiida_qeq.calculations.eqeq:EQeqCalculation"},"qeq.qeq":{description:["AiiDA calculation plugin for the Qeq code."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"parameters",required:!0,valid_types:"SinglefileData",info:"File containing electronegativity and Idempotential data of the elements."},{name:"structure",required:!0,valid_types:"CifData",info:"Input structure, for which atomic charges are to be computed."},{name:"configure",required:!1,valid_types:"QeqParameters",info:"Configuration input for QEQ (configure.input file)"},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."}]},class:"aiida_qeq.calculations.qeq:QeqCalculation"}},"aiida.data":{"qeq.eqeq":"aiida_qeq.data.eqeq:EQeqParameters","qeq.qeq":"aiida_qeq.data.qeq:QeqParameters"},"aiida.parsers":{"qeq.eqeq":"aiida_qeq.parsers.eqeq:EQeqParser","qeq.qeq":"aiida_qeq.parsers.qeq:QeqParser"}},commits_count:0,summaryinfo:[{colorclass:"blue",text:"Calculations",count:2},{colorclass:"brown",text:"Parsers",count:2},{colorclass:"red",text:"Data",count:2}],pip_install_cmd:"pip install aiida-qeq",is_installable:"True"},"aiida-qp2":{code_home:"https://github.com/TREX-CoE/aiida-qp2",entry_point_prefix:"qp2",pip_url:"aiida-qp2",documentation_url:"https://trex-coe.github.io/aiida-qp2/index.html",name:"aiida-qp2",package_name:"aiida_qp2",hosted_on:"github.com",metadata:{description:"AiiDA plugin for the Quantum Package 2.0",author:"Evgeny Posenitskiy",author_email:"posenitskiy@irsamc.ups-tlse.fr",license:"MIT",home_page:"https://github.com/TREX-CoE/aiida-qp2",classifiers:["Development Status :: 4 - Beta","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Programming Language :: Python"],version:"0.2.0"},aiida_version:null,entry_points:{"aiida.calculations":{qp2:{description:["AiiDA calculation plugin wrapping the Quantum Package code."],spec:{inputs:[{name:"parameters",required:!0,valid_types:"Dict",info:"Input parameters to generate the input file."},{name:"basissets",required:!1,valid_types:"",info:"A dictionary of basissets to be used in the calculations: key is the atomic symbol, value is either a single basisset."},{name:"code",required:!1,valid_types:"Code",info:"The `Code` to use for this job."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"pseudos",required:!1,valid_types:"",info:"A dictionary of pseudopotentials to be used in the calculations: key is the atomic symbol, value is a single pseudopotential."},{name:"settings",required:!1,valid_types:"Dict",info:"Additional input parameters."},{name:"structure",required:!1,valid_types:"StructureData",info:"Input structrure"},{name:"wavefunction",required:!1,valid_types:"SinglefileData",info:"The wavefunction file (EZFIO or TREXIO)."}],outputs:[{name:"output_wavefunction",required:!0,valid_types:"SinglefileData",info:"The wave function file (EZFIO or TREXIO)"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"output_energy",required:!1,valid_types:"Float",info:"The result of the calculation"},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The retrieved folder data node could not be accessed."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:300,message:"Calculation did not produce all expected output files."},{status:400,message:"Energy value is not present in the output file."}]},class:"aiida_qp2.calculations:QP2Calculation"}},"aiida.parsers":{qp2:"aiida_qp2.parsers:QP2Parser"}},commits_count:0,development_status:"beta",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1}],pip_install_cmd:"pip install aiida-qp2",is_installable:"True"},"aiida-quantumespresso":{code_home:"https://github.com/aiidateam/aiida-quantumespresso",documentation_url:"https://aiida-quantumespresso.readthedocs.io/",entry_point_prefix:"quantumespresso",pip_url:"aiida-quantumespresso",plugin_info:"https://raw.github.com/aiidateam/aiida-quantumespresso/master/setup.json",name:"aiida-quantumespresso",package_name:"aiida_quantumespresso",hosted_on:"github.com",metadata:{description:"The official AiiDA plugin for Quantum ESPRESSO",author_email:"The AiiDA team ",classifiers:["Development Status :: 5 - Production/Stable","Framework :: AiiDA","License :: OSI Approved :: MIT License","Programming Language :: Python","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9"],version:"4.4.0"},aiida_version:">=2.3,<3.0",entry_points:{"aiida.calculations":{"quantumespresso.cp":{description:["`CalcJob` implementation for the cp.x code of Quantum ESPRESSO."],spec:{inputs:[{name:"parameters",required:!0,valid_types:"Dict",info:"The input parameters that are to be used to construct the input file."},{name:"pseudos",required:!0,valid_types:"UpfData, UpfData",info:"A mapping of `UpfData` nodes onto the kind name to which they should apply."},{name:"structure",required:!0,valid_types:"StructureData",info:"The input structure."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parallelization",required:!1,valid_types:"Dict, NoneType",info:`Parallelization options. 
The following flags are allowed: +`},{name:"parent_folder",required:!1,valid_types:"RemoteData, NoneType",info:"An optional working directory of a previously completed calculation to restart from."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Optional parameters to affect the way the calculation job and the parsing are performed."},{name:"vdw_table",required:!1,valid_types:"SinglefileData, NoneType",info:"Optional van der Waals table contained in a `SinglefileData`."}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"output_trajectory",required:!0,valid_types:"TrajectoryData",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:301,message:"The retrieved temporary folder could not be accessed."},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:303,message:"The required XML file is not present in the retrieved folder."},{status:304,message:"The retrieved folder contains multiple XML files."},{status:310,message:"The stdout output file could not be read."},{status:311,message:"The stdout output file could not be parsed."},{status:312,message:"The stdout output file was incomplete probably because the calculation got interrupted."},{status:320,message:"The required XML file could not be read."},{status:330,message:"The required POS file could not be read."},{status:340,message:"The required trajectory data could not be read."},{status:400,message:"The calculation stopped prematurely because it ran out of walltime."}]},class:"aiida_quantumespresso.calculations.cp:CpCalculation"},"quantumespresso.create_kpoints_from_distance":{description:["Generate a uniformly spaced kpoint mesh for a given structure.",""," The spacing between kpoints in reciprocal space is guaranteed to be at least the defined 
distance.",""," :param structure: the StructureData to which the mesh should apply"," :param distance: a Float with the desired distance between kpoints in reciprocal space"," :param force_parity: a Bool to specify whether the generated mesh should maintain parity"," :returns: a KpointsData with the generated mesh"],spec:{inputs:[{name:"distance",required:!0,valid_types:"Data",info:"a Float with the desired distance between kpoints in reciprocal space"},{name:"force_parity",required:!0,valid_types:"Data",info:"a Bool to specify whether the generated mesh should maintain parity"},{name:"structure",required:!0,valid_types:"Data",info:"the StructureData to which the mesh should apply"},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_quantumespresso.calculations.functions.create_kpoints_from_distance:create_kpoints_from_distance"},"quantumespresso.create_magnetic_configuration":{description:["Create a new magnetic configuration from the given structure based on a list of magnetic moments per site.",""," To create the new list of kinds, the algorithm loops over all the elements in the structure and makes a list of the"," sites with that element and their corresponding magnetic moment. Next, it splits this list in three lists:",""," * Zero magnetic moments: Any site that has an absolute magnetic moment lower than ``ztol``"," * Positive magnetic moments"," * Negative magnetic moments",""," The algorithm then sorts the positive and negative lists from large to small absolute value, and loops over each of"," list. New magnetic kinds will be created when the absolute difference between the magnetic moment of the current"," kind and the site exceeds ``atol``.",""," The positive and negative magnetic moments are handled separately to avoid assigning two sites with opposite signs"," in their magnetic moment to the same kind and make sure that each kind has the correct magnetic moment, i.e. the"," largest magnetic moment in absolute value of the sites corresponding to that kind.",""," .. 
important:: the function currently does not support alloys.",""," :param structure: a `StructureData` instance."," :param magnetic_moment_per_site: list of magnetic moments for each site in the structure."," :param atol: the absolute tolerance on determining if two sites have the same magnetic moment."," :param ztol: threshold for considering a kind to have non-zero magnetic moment."],spec:{inputs:[{name:"magnetic_moment_per_site",required:!0,valid_types:"Data",info:"list of magnetic moments for each site in the structure."},{name:"structure",required:!0,valid_types:"Data",info:"a `StructureData` instance."},{name:"atol",required:!1,valid_types:"Data",info:"the absolute tolerance on determining if two sites have the same magnetic moment."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"ztol",required:!1,valid_types:"Data",info:"threshold for considering a kind to have non-zero magnetic moment."}],outputs:[],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_quantumespresso.calculations.functions.create_magnetic_configuration:create_magnetic_configuration"},"quantumespresso.dos":{description:["`CalcJob` implementation for the dos.x code of Quantum ESPRESSO."],spec:{inputs:[{name:"parent_folder",required:!0,valid_types:"RemoteData, FolderData",info:""},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Parameters for the namelists in the input file."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Use an additional node for special settings"}],outputs:[{name:"output_dos",required:!0,valid_types:"XyData",info:""},{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:310,message:"An exception was raised while reading the `stdout` file: {exception}"},{status:311,message:"An exception was raised while parsing the `stdout` file: {exception}"},{status:312,message:"The stdout output file was incomplete probably because the calculation got interrupted."},{status:330,message:"The dos file could not be read from the retrieved folder."}]},class:"aiida_quantumespresso.calculations.dos:DosCalculation"},"quantumespresso.epw":{description:["`CalcJob` implementation for the epw.x code of Quantum ESPRESSO."],spec:{inputs:[{name:"kfpoints",required:!0,valid_types:"KpointsData",info:"fine kpoint mesh"},{name:"kpoints",required:!0,valid_types:"KpointsData",info:"coarse kpoint mesh"},{name:"parameters",required:!0,valid_types:"Dict",info:""},{name:"parent_folder_nscf",required:!0,valid_types:"RemoteData",info:"the folder of a completed nscf `PwCalculation`"},{name:"parent_folder_ph",required:!0,valid_types:"RemoteData",info:"the folder of a completed `PhCalculation`"},{name:"qfpoints",required:!0,valid_types:"KpointsData",info:"fine qpoint mesh"},{name:"qpoints",required:!0,valid_types:"KpointsData",info:"coarse qpoint mesh"},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"}]},class:"aiida_quantumespresso.calculations.epw:EpwCalculation"},"quantumespresso.matdyn":{description:["`CalcJob` implementation for the matdyn.x code of Quantum ESPRESSO."],spec:{inputs:[{name:"force_constants",required:!0,valid_types:"ForceConstantsData",info:""},{name:"kpoints",required:!0,valid_types:"KpointsData",info:"Kpoints on which to calculate the phonon frequencies."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Parameters for the namelists in the input file."},{name:"parent_folder",required:!1,valid_types:"RemoteData, FolderData, SinglefileData, NoneType",info:"Use a local or remote folder as parent folder (for restarts and similar)"},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Use an additional node for special settings"}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"output_phonon_bands",required:!0,valid_types:"BandsData",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:310,message:"An exception was raised while reading the `stdout` file: {exception}"},{status:311,message:"An exception was raised while parsing the `stdout` file: {exception}"},{status:312,message:"The stdout output file was incomplete probably because the calculation got interrupted."},{status:330,message:"The output frequencies file could not be read from the retrieved folder."},{status:410,message:"Number of kpoints not found in the output data"},{status:411,message:"Number of kpoints in the inputs is not commensurate with those in the output"}]},class:"aiida_quantumespresso.calculations.matdyn:MatdynCalculation"},"quantumespresso.merge_ph_outputs":{description:["Calcfunction to merge outputs from multiple `ph.x` calculations with different q-points."],spec:{inputs:[{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_quantumespresso.calculations.functions.merge_ph_outputs:merge_ph_outputs"},"quantumespresso.namelists":{description:["`CalcJob` implementation to serve as base class for simple post-processing tools of Quantum ESPRESSO."],spec:{inputs:[{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. 
This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Parameters for the namelists in the input file."},{name:"parent_folder",required:!1,valid_types:"RemoteData, FolderData, SinglefileData, NoneType",info:"Use a local or remote folder as parent folder (for restarts and similar)"},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Use an additional node for special settings"}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:310,message:"An exception was raised while reading the `stdout` file: {exception}"},{status:311,message:"An exception was raised while parsing the `stdout` file: {exception}"},{status:312,message:"The stdout output file was incomplete probably because the calculation got interrupted."}]},class:"aiida_quantumespresso.calculations.namelists:NamelistsCalculation"},"quantumespresso.neb":{description:["Nudged Elastic Band code (neb.x) of Quantum ESPRESSO distribution."],spec:{inputs:[{name:"first_structure",required:!0,valid_types:"StructureData",info:"Initial structure"},{name:"last_structure",required:!0,valid_types:"StructureData",info:"Final structure"},{name:"parameters",required:!0,valid_types:"Dict",info:"NEB-specific input parameters"},{name:"pw",required:!0,valid_types:"Data",info:""},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. 
This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parent_folder",required:!1,valid_types:"RemoteData, NoneType",info:"An optional working directory of a previously completed calculation to restart from."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Optional parameters to affect the way the calculation job and the parsing are performed."}],outputs:[{name:"output_mep",required:!0,valid_types:"ArrayData",info:"The original and interpolated energy profiles along the minimum-energy path (mep)"},{name:"output_parameters",required:!0,valid_types:"Dict",info:"The output parameters dictionary of the NEB calculation"},{name:"output_trajectory",required:!0,valid_types:"TrajectoryData",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"iteration_array",required:!1,valid_types:"ArrayData",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:303,message:"The required XML file is not present in the retrieved folder."},{status:320,message:"The XML output file could not be read."},{status:321,message:"The XML output file could not be parsed."},{status:322,message:"The XML output file has an unsupported format."},{status:350,message:"The parser raised an unexpected exception: {exception}"}]},class:"aiida_quantumespresso.calculations.neb:NebCalculation"},"quantumespresso.open_grid":{description:["``CalcJob`` implementation for the ``open_grid.x`` code of Quantum ESPRESSO."],spec:{inputs:[{name:"parent_folder",required:!0,valid_types:"RemoteData, FolderData",info:"The output folder of a completed `PwCalculation` on an irreducible Brillouin zone"},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Parameters for the namelists in the input file."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Use an additional node for special settings"}],outputs:[{name:"kpoints",required:!0,valid_types:"KpointsData",info:"The explicit list of kpoints of the unfolded kmesh"},{name:"kpoints_mesh",required:!0,valid_types:"KpointsData",info:"The dimensions of the unfolded kmesh"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"The retrieved folder data node could not be accessed."},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:310,message:"An exception was raised while reading the `stdout` file: {exception}"},{status:311,message:"An exception was raised while parsing the `stdout` file: {exception}"},{status:312,message:"The stdout output file was incomplete probably because the calculation got interrupted."},{status:312,message:"Found rotation or fractional translation not compatible with FFT grid."},{status:350,message:"Mismatch between kmesh dimensions and number of kpoints."}]},class:"aiida_quantumespresso.calculations.open_grid:OpenGridCalculation"},"quantumespresso.ph":{description:["`CalcJob` implementation for the ph.x code of Quantum ESPRESSO."],spec:{inputs:[{name:"parameters",required:!0,valid_types:"Dict",info:""},{name:"parent_folder",required:!0,valid_types:"RemoteData",info:"the folder of a completed `PwCalculation`"},{name:"qpoints",required:!0,valid_types:"KpointsData",info:"qpoint mesh"},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:""}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:305,message:"Both the stdout and XML output files could not be read or parsed."},{status:310,message:"The stdout output file could not be read."},{status:311,message:"The stdout output file could not be parsed."},{status:312,message:"The stdout output file was incomplete probably because the calculation got interrupted."},{status:350,message:"The parser raised an unexpected exception: {exception}"},{status:400,message:"The calculation stopped prematurely because it ran out of walltime."},{status:410,message:"The minimization cycle did not reach self-consistency."},{status:462,message:"The code failed during the cholesky factorization."}]},class:"aiida_quantumespresso.calculations.ph:PhCalculation"},"quantumespresso.pp":{description:["`CalcJob` implementation for the pp.x code of Quantum ESPRESSO."],spec:{inputs:[{name:"parameters",required:!0,valid_types:"Dict",info:"Use a node that specifies the input parameters for the namelists"},{name:"parent_folder",required:!0,valid_types:"RemoteData, FolderData",info:"Output folder of a completed `PwCalculation`"},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Optional parameters to affect the way the calculation job is performed."}],outputs:[{name:"output_data",required:!0,valid_types:"ArrayData",info:""},{name:"output_data_multiple",required:!0,valid_types:"ArrayData",info:""},{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:301,message:"The retrieved temporary folder could not be accessed."},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:303,message:"The parent folder did not contain the required XML output file."},{status:310,message:"The stdout output file could not be read."},{status:311,message:"The stdout output file could not be parsed."},{status:312,message:"The stdout output file was incomplete."},{status:330,message:"The formatted data output file `{filename}` was not present in the retrieved (temporary) folder."},{status:331,message:"The formatted data output file `{filename}` could not be read."},{status:332,message:"The data file format is not supported by the parser"},{status:333,message:"The formatted data output file `{filename}` could not be parsed: {exception}"},{status:340,message:"The calculation stopped prematurely because it ran out of walltime but the job was killed by the scheduler before the files were safely written to disk for a potential restart."},{status:350,message:"The parser raised an unexpected exception: {exception}"}]},class:"aiida_quantumespresso.calculations.pp:PpCalculation"},"quantumespresso.projwfc":{description:["`CalcJob` implementation for the projwfc.x code of Quantum ESPRESSO.",""," Projwfc.x code of the Quantum ESPRESSO distribution, handles the the computation of projections of bloch"," wavefunctions onto atomic orbitals.",""," . For more information, refer to http://www.quantum-espresso.org/"],spec:{inputs:[{name:"parent_folder",required:!0,valid_types:"RemoteData, FolderData",info:"The output folder of a pw.x calculation"},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. 
This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Parameters for the namelists in the input file."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Use an additional node for special settings"}],outputs:[{name:"Dos",required:!0,valid_types:"XyData",info:""},{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"bands",required:!1,valid_types:"BandsData",info:""},{name:"bands_down",required:!1,valid_types:"BandsData",info:""},{name:"bands_up",required:!1,valid_types:"BandsData",info:""},{name:"projections",required:!1,valid_types:"ProjectionData",info:""},{name:"projections_down",required:!1,valid_types:"ProjectionData",info:""},{name:"projections_up",required:!1,valid_types:"ProjectionData",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:301,message:"The retrieved temporary folder could not be accessed."},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:303,message:"The retrieved folder did not contain the required XML file."},{status:310,message:"An exception was raised while reading the `stdout` file: {exception}"},{status:311,message:"An exception was raised while parsing the `stdout` file: {exception}"},{status:312,message:"The stdout output file was incomplete probably because the calculation got interrupted."},{status:320,message:"The XML output file could not be read."},{status:321,message:"The XML output file could 
not be parsed."},{status:322,message:"The XML output file has an unsupported format."},{status:330,message:"The pdos_tot file could not be read from the retrieved folder."},{status:340,message:"An exception was raised parsing bands and projections."}]},class:"aiida_quantumespresso.calculations.projwfc:ProjwfcCalculation"},"quantumespresso.pw":{description:["`CalcJob` implementation for the pw.x code of Quantum ESPRESSO."],spec:{inputs:[{name:"kpoints",required:!0,valid_types:"KpointsData",info:"kpoint mesh or kpoint path"},{name:"parameters",required:!0,valid_types:"Dict",info:"The input parameters that are to be used to construct the input file."},{name:"pseudos",required:!0,valid_types:"UpfData, UpfData",info:"A mapping of `UpfData` nodes onto the kind name to which they should apply."},{name:"structure",required:!0,valid_types:"StructureData",info:"The input structure."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"hubbard_file",required:!1,valid_types:"SinglefileData, NoneType",info:"SinglefileData node containing the output Hubbard parameters from a HpCalculation"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parallelization",required:!1,valid_types:"Dict, NoneType",info:`Parallelization options. The following flags are allowed: +npool : The number of 'pools', each taking care of a group of k-points. +nband : The number of 'band groups', each taking care of a group of Kohn-Sham orbitals. +ntg : The number of 'task groups' across which the FFT planes are distributed. +ndiag : The number of 'linear algebra groups' used when parallelizing the subspace diagonalization / iterative orthonormalization. By default, no parameter is passed to Quantum ESPRESSO, meaning it will use its default.`},{name:"parent_folder",required:!1,valid_types:"RemoteData, NoneType",info:"An optional working directory of a previously completed calculation to restart from."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Optional parameters to affect the way the calculation job and the parsing are performed."},{name:"vdw_table",required:!1,valid_types:"SinglefileData, NoneType",info:"Optional van der Waals table contained in a `SinglefileData`."}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"The `output_parameters` output node of the successful calculation."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"output_atomic_occupations",required:!1,valid_types:"Dict",info:""},{name:"output_band",required:!1,valid_types:"BandsData",info:"The `output_band` output node of the successful calculation if present."},{name:"output_kpoints",required:!1,valid_types:"KpointsData",info:""},{name:"output_structure",required:!1,valid_types:"StructureData",info:"The `output_structure` output node of the successful calculation if present."},{name:"output_trajectory",required:!1,valid_types:"TrajectoryData",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:301,message:"The retrieved temporary folder could not be accessed."},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:303,message:"The retrieved folder did not contain the required XML file."},{status:304,message:"The retrieved folder contained multiple XML files."},{status:305,message:"Both the stdout and XML output files could not be read or parsed."},{status:310,message:"The stdout output file could not be read."},{status:311,message:"The stdout output file could not be parsed."},{status:312,message:"The stdout output file was incomplete probably because the calculation got interrupted."},{status:320,message:"The XML output file could not be read."},{status:321,message:"The XML output file could not be parsed."},{status:322,message:"The XML output file has an unsupported format."},{status:340,message:"The calculation stopped prematurely because it ran out of walltime but the job was killed by the scheduler before the files were safely written to disk for a potential restart."},{status:350,message:"The parser raised an unexpected exception: {exception}"},{status:360,message:"The code failed in finding a valid reciprocal lattice vector."},{status:400,message:"The calculation stopped prematurely because it ran out of 
walltime."},{status:410,message:"The electronic minimization cycle did not reach self-consistency."},{status:461,message:"The code failed with negative dexx in the exchange calculation."},{status:462,message:"The code failed during the cholesky factorization."},{status:463,message:"Too many bands failed to converge during the diagonalization."},{status:464,message:"The S matrix was found to be not positive definite."},{status:465,message:"The `zhegvd` failed in the PPCG diagonalization."},{status:466,message:"The `[Q, R] = qr(X, 0)` failed in the PPCG diagonalization."},{status:467,message:"The eigenvector failed to converge."},{status:468,message:"The factorization in the Broyden routine failed."},{status:481,message:'The k-point parallelization "npools" is too high, some nodes have no k-points.'},{status:500,message:"The ionic minimization cycle did not converge for the given thresholds."},{status:501,message:"Then ionic minimization cycle converged but the thresholds are exceeded in the final SCF."},{status:502,message:"The ionic minimization cycle did not converge after the maximum number of steps."},{status:503,message:"The ionic minimization cycle did not finish because the calculation was interrupted but a partial trajectory and output structure was successfully parsed which can be used for a restart."},{status:510,message:"The electronic minimization cycle failed during an ionic minimization cycle."},{status:511,message:"The ionic minimization cycle converged, but electronic convergence was not reached in the final SCF."},{status:520,message:"The ionic minimization cycle terminated prematurely because of two consecutive failures in the BFGS algorithm."},{status:521,message:"The ionic minimization cycle terminated prematurely because of two consecutive failures in the BFGS algorithm and electronic convergence failed in the final SCF."},{status:531,message:"The electronic minimization cycle did not reach self-consistency."},{status:541,message:"The variable cell optimization broke the symmetry of the k-points."},{status:542,message:"The cell relaxation caused a significant volume contraction and there is not enough space allocated for radial FFT."},{status:710,message:"The electronic minimization cycle did not reach self-consistency, but `scf_must_converge` is `False` and/or `electron_maxstep` is 0."}]},class:"aiida_quantumespresso.calculations.pw:PwCalculation"},"quantumespresso.pw2gw":{description:["`CalcJob` implementation for the pw2gw.x code of Quantum ESPRESSO."],spec:{inputs:[{name:"parent_folder",required:!0,valid_types:"RemoteData",info:"Output folder of a completed `PwCalculation`"},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Parameters for the namelists in the input file."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. 
The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Use an additional node for special settings"}],outputs:[{name:"eps",required:!0,valid_types:"ArrayData",info:"The `eps` output node containing 5 arrays `energy`, `epsX`, `epsY`, `epsZ`, `epsTOT`"},{name:"output_parameters",required:!0,valid_types:"Dict",info:"The `output_parameters` output node of the successful calculation.`"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:305,message:"The eps*.dat output files could not be read or parsed."},{status:310,message:"An exception was raised while reading the `stdout` file: {exception}"},{status:311,message:"An exception was raised while parsing the `stdout` file: {exception}"},{status:312,message:"The stdout output file was incomplete probably because the calculation got interrupted."},{status:330,message:"The eps*.dat output files do not have the expected shape (N, 2)."},{status:331,message:"The eps*.dat output files contains different values of energies."},{status:350,message:"The parser raised an unexpected exception: {exception}"}]},class:"aiida_quantumespresso.calculations.pw2gw:Pw2gwCalculation"},"quantumespresso.pw2wannier90":{description:["`CalcJob` implementation for the pw2wannier.x code of Quantum ESPRESSO.",""," For more information, refer to http://www.quantum-espresso.org/ and http://www.wannier.org/"],spec:{inputs:[{name:"nnkp_file",required:!0,valid_types:"SinglefileData",info:"A SinglefileData containing the .nnkp file generated by wannier90.x -pp"},{name:"parent_folder",required:!0,valid_types:"RemoteData, FolderData",info:"The output folder of a pw.x calculation"},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. 
This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Parameters for the namelists in the input file."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Use an additional node for special settings"}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:310,message:"An exception was raised while reading the `stdout` file: {exception}"},{status:311,message:"An exception was raised while parsing the `stdout` file: {exception}"},{status:312,message:"The stdout output file was incomplete probably because the calculation got interrupted."},{status:340,message:"Encountered a generic error message"},{status:350,message:"The parser raised an unexpected exception: {exception}"}]},class:"aiida_quantumespresso.calculations.pw2wannier90:Pw2wannier90Calculation"},"quantumespresso.pwimmigrant":{description:["Create a PwCalculation object that can be used to import old jobs.",""," This is a sublass of aiida_quantumespresso.calculations.PwCalculation"," with slight modifications to some of the class variables and additional"," methods that",""," a. parse the job's input file to create the calculation's input"," nodes that would exist if the calculation were submitted using AiiDa,"," b. 
bypass the functions of the daemon, and prepare the node's attributes"," such that all the processes (copying of the files to the repository,"," results parsing, ect.) can be performed",""," .. note:: The keyword arguments of PwCalculation are also available.",""," :param remote_workdir: Absolute path to the directory where the job was run."," The transport of the computer you link ask input to the calculation is"," the transport that will be used to retrieve the calculation's files."," Therefore, ``remote_workdir`` should be the absolute path to the job's"," directory on that computer."," :type remote_workdir: str",""," :param input_file_name: The file name of the job's input file."," :type input_file_name: str",""," :param output_file_name: The file name of the job's output file (i.e. the"," file containing the stdout of QE)."," :type output_file_name: str"],spec:{inputs:[{name:"kpoints",required:!0,valid_types:"KpointsData",info:"kpoint mesh or kpoint path"},{name:"parameters",required:!0,valid_types:"Dict",info:"The input parameters that are to be used to construct the input file."},{name:"pseudos",required:!0,valid_types:"UpfData, UpfData",info:"A mapping of `UpfData` nodes onto the kind name to which they should apply."},{name:"structure",required:!0,valid_types:"StructureData",info:"The input structure."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"hubbard_file",required:!1,valid_types:"SinglefileData, NoneType",info:"SinglefileData node containing the output Hubbard parameters from a HpCalculation"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parallelization",required:!1,valid_types:"Dict, NoneType",info:`Parallelization options. The following flags are allowed: +npool : The number of 'pools', each taking care of a group of k-points. +nband : The number of 'band groups', each taking care of a group of Kohn-Sham orbitals. +ntg : The number of 'task groups' across which the FFT planes are distributed. +ndiag : The number of 'linear algebra groups' used when parallelizing the subspace diagonalization / iterative orthonormalization. By default, no parameter is passed to Quantum ESPRESSO, meaning it will use its default.`},{name:"parent_folder",required:!1,valid_types:"RemoteData, NoneType",info:"An optional working directory of a previously completed calculation to restart from."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Optional parameters to affect the way the calculation job and the parsing are performed."},{name:"vdw_table",required:!1,valid_types:"SinglefileData, NoneType",info:"Optional van der Waals table contained in a `SinglefileData`."}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"The `output_parameters` output node of the successful calculation."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"output_atomic_occupations",required:!1,valid_types:"Dict",info:""},{name:"output_band",required:!1,valid_types:"BandsData",info:"The `output_band` output node of the successful calculation if present."},{name:"output_kpoints",required:!1,valid_types:"KpointsData",info:""},{name:"output_structure",required:!1,valid_types:"StructureData",info:"The `output_structure` output node of the successful calculation if present."},{name:"output_trajectory",required:!1,valid_types:"TrajectoryData",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:301,message:"The retrieved temporary folder could not be accessed."},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:303,message:"The retrieved folder did not contain the required XML file."},{status:304,message:"The retrieved folder contained multiple XML files."},{status:305,message:"Both the stdout and XML output files could not be read or parsed."},{status:310,message:"The stdout output file could not be read."},{status:311,message:"The stdout output file could not be parsed."},{status:312,message:"The stdout output file was incomplete probably because the calculation got interrupted."},{status:320,message:"The XML output file could not be read."},{status:321,message:"The XML output file could not be parsed."},{status:322,message:"The XML output file has an unsupported format."},{status:340,message:"The calculation stopped prematurely because it ran out of walltime but the job was killed by the scheduler before the files were safely written to disk for a potential restart."},{status:350,message:"The parser raised an unexpected exception: {exception}"},{status:360,message:"The code failed in finding a valid reciprocal lattice vector."},{status:400,message:"The calculation stopped prematurely because it ran out of 
walltime."},{status:410,message:"The electronic minimization cycle did not reach self-consistency."},{status:461,message:"The code failed with negative dexx in the exchange calculation."},{status:462,message:"The code failed during the cholesky factorization."},{status:463,message:"Too many bands failed to converge during the diagonalization."},{status:464,message:"The S matrix was found to be not positive definite."},{status:465,message:"The `zhegvd` failed in the PPCG diagonalization."},{status:466,message:"The `[Q, R] = qr(X, 0)` failed in the PPCG diagonalization."},{status:467,message:"The eigenvector failed to converge."},{status:468,message:"The factorization in the Broyden routine failed."},{status:481,message:'The k-point parallelization "npools" is too high, some nodes have no k-points.'},{status:500,message:"The ionic minimization cycle did not converge for the given thresholds."},{status:501,message:"Then ionic minimization cycle converged but the thresholds are exceeded in the final SCF."},{status:502,message:"The ionic minimization cycle did not converge after the maximum number of steps."},{status:503,message:"The ionic minimization cycle did not finish because the calculation was interrupted but a partial trajectory and output structure was successfully parsed which can be used for a restart."},{status:510,message:"The electronic minimization cycle failed during an ionic minimization cycle."},{status:511,message:"The ionic minimization cycle converged, but electronic convergence was not reached in the final SCF."},{status:520,message:"The ionic minimization cycle terminated prematurely because of two consecutive failures in the BFGS algorithm."},{status:521,message:"The ionic minimization cycle terminated prematurely because of two consecutive failures in the BFGS algorithm and electronic convergence failed in the final SCF."},{status:531,message:"The electronic minimization cycle did not reach self-consistency."},{status:541,message:"The variable cell optimization broke the symmetry of the k-points."},{status:542,message:"The cell relaxation caused a significant volume contraction and there is not enough space allocated for radial FFT."},{status:710,message:"The electronic minimization cycle did not reach self-consistency, but `scf_must_converge` is `False` and/or `electron_maxstep` is 0."}]},class:"aiida_quantumespresso.calculations.pwimmigrant:PwimmigrantCalculation"},"quantumespresso.q2r":{description:["`CalcJob` implementation for the q2r.x code of Quantum ESPRESSO."],spec:{inputs:[{name:"parent_folder",required:!0,valid_types:"RemoteData, FolderData",info:""},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Parameters for the namelists in the input file."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. 
The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Use an additional node for special settings"}],outputs:[{name:"force_constants",required:!0,valid_types:"ForceConstantsData",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:310,message:"An exception was raised while reading the `stdout` file: {exception}"},{status:311,message:"An exception was raised while parsing the `stdout` file: {exception}"},{status:312,message:"The stdout output file was incomplete probably because the calculation got interrupted."},{status:330,message:"The force constants file could not be read."}]},class:"aiida_quantumespresso.calculations.q2r:Q2rCalculation"},"quantumespresso.seekpath_structure_analysis":{description:["Primitivize the structure with SeeKpath and generate the high symmetry k-point path through its Brillouin zone.",""," This calcfunction will take a structure and pass it through SeeKpath to get the normalized primitive cell and the"," path of high symmetry k-points through its Brillouin zone. 
Note that the returned primitive cell may differ from the"," original structure in which case the k-points are only congruent with the primitive cell.",""," The keyword arguments can be used to specify various Seekpath parameters, such as:",""," with_time_reversal: True"," reference_distance: 0.025"," recipe: 'hpkot'"," threshold: 1e-07"," symprec: 1e-05"," angle_tolerance: -1.0",""," Note that exact parameters that are available and their defaults will depend on your Seekpath version."],spec:{inputs:[{name:"structure",required:!0,valid_types:"Data",info:""},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_quantumespresso.calculations.functions.seekpath_structure_analysis:seekpath_structure_analysis"},"quantumespresso.xspectra":{description:["CalcJob implementation for the xspectra.x code of Quantum ESPRESSO."],spec:{inputs:[{name:"core_wfc_data",required:!0,valid_types:"SinglefileData",info:"Core wavefunction data, generated by the upf2plotcore.sh utility"},{name:"kpoints",required:!0,valid_types:"KpointsData",info:"The K-point sampling to be used for the XSpectra calculation"},{name:"parent_folder",required:!0,valid_types:"RemoteData",info:""},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"gamma_file",required:!1,valid_types:"SinglefileData, NoneType",info:"An optional file containing the data for the broadening function used when `gamma_mode=file`"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"parameters",required:!1,valid_types:"Dict, NoneType",info:"Parameters for the namelists in the input file."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Use an additional node for special settings"}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"spectra",required:!0,valid_types:"XyData",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:302,message:"The retrieved folder did not contain the required stdout output file."},{status:310,message:"An exception was raised while reading the `stdout` file: {exception}"},{status:311,message:"An exception was raised while parsing the `stdout` file: {exception}"},{status:312,message:"The stdout output file was incomplete probably because the calculation got interrupted."},{status:313,message:"xiabs was set incorrectly, check and ensure that the index value correctly refers to the atomic species containing the core-hole (where the index starts from 1)."},{status:314,message:"xiabs was either set to 0 or less, or was greater than ntyp."},{status:330,message:"The xspectra output file could not be read from the retrieved folder."},{status:331,message:"The spectrum data file could not be read using NumPy genfromtxt"},{status:400,message:"The time limit set for the calculation was exceeded, and the job wrote a save file before 
exiting."}]},class:"aiida_quantumespresso.calculations.xspectra:XspectraCalculation"}},"aiida.data":{"quantumespresso.force_constants":"aiida_quantumespresso.data.force_constants:ForceConstantsData","quantumespresso.hubbard_structure":"aiida_quantumespresso.data.hubbard_structure:HubbardStructureData"},"aiida.parsers":{"quantumespresso.cp":"aiida_quantumespresso.parsers.cp:CpParser","quantumespresso.dos":"aiida_quantumespresso.parsers.dos:DosParser","quantumespresso.matdyn":"aiida_quantumespresso.parsers.matdyn:MatdynParser","quantumespresso.neb":"aiida_quantumespresso.parsers.neb:NebParser","quantumespresso.open_grid":"aiida_quantumespresso.parsers.open_grid:OpenGridParser","quantumespresso.ph":"aiida_quantumespresso.parsers.ph:PhParser","quantumespresso.pp":"aiida_quantumespresso.parsers.pp:PpParser","quantumespresso.projwfc":"aiida_quantumespresso.parsers.projwfc:ProjwfcParser","quantumespresso.pw":"aiida_quantumespresso.parsers.pw:PwParser","quantumespresso.pw2gw":"aiida_quantumespresso.parsers.pw2gw:Pw2gwParser","quantumespresso.pw2wannier90":"aiida_quantumespresso.parsers.pw2wannier90:Pw2wannier90Parser","quantumespresso.q2r":"aiida_quantumespresso.parsers.q2r:Q2rParser","quantumespresso.xspectra":"aiida_quantumespresso.parsers.xspectra:XspectraParser"},"aiida.tools.calculations":{"quantumespresso.pw":"aiida_quantumespresso.tools.calculations.pw:PwCalculationTools"},"aiida.tools.data.orbitals":{noncollinearhydrogen:"aiida_quantumespresso.tools.data.orbital.noncollinearhydrogen:NoncollinearHydrogenOrbital",spinorbithydrogen:"aiida_quantumespresso.tools.data.orbital.spinorbithydrogen:SpinorbitHydrogenOrbital"},"aiida.workflows":{"quantumespresso.matdyn.base":{description:["Workchain to run a Quantum ESPRESSO matdyn.x calculation with automated error handling and restarts."],spec:{inputs:[{name:"matdyn",required:!0,valid_types:"Data",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"handler_overrides",required:!1,valid_types:"Dict, NoneType",info:"Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration."},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"output_phonon_bands",required:!0,valid_types:"BandsData",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:300,message:"The calculation failed with an unrecoverable error."},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:401,message:"The maximum number of iterations was exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."}]},class:"aiida_quantumespresso.workflows.matdyn.base:MatdynBaseWorkChain"},"quantumespresso.pdos":{description:["A WorkChain to compute Total & Partial Density of States of a structure, using Quantum Espresso."],spec:{inputs:[{name:"dos",required:!0,valid_types:"Data",info:"Input parameters for the `dos.x` calculation. Note that the `Emin`, `Emax` and `DeltaE` values have to match with those in the `projwfc` inputs."},{name:"nscf",required:!0,valid_types:"Data",info:"Inputs for the `PwBaseWorkChain` of the `nscf` calculation."},{name:"projwfc",required:!0,valid_types:"Data",info:"Input parameters for the `projwfc.x` calculation. Note that the `Emin`, `Emax` and `DeltaE` values have to match with those in the `dos` inputs."},{name:"structure",required:!0,valid_types:"StructureData",info:"The input structure."},{name:"align_to_fermi",required:!1,valid_types:"Bool",info:"If true, Emin=>Emin-Efermi & Emax=>Emax-Efermi, where Efermi is taken from the `nscf` calculation. 
Note that it only makes sense to align `Emax` and `Emin` to the fermi level in case they are actually provided by in the `dos` and `projwfc` inputs, since otherwise the "},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If ``True``, work directories of all called calculation will be cleaned at the end of execution."},{name:"dry_run",required:!1,valid_types:"Bool, NoneType",info:"Terminate workchain steps before submitting calculations (test purposes only)."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"scf",required:!1,valid_types:"Data",info:"Inputs for the `PwBaseWorkChain` of the `scf` calculation."},{name:"serial_clean",required:!1,valid_types:"Bool, NoneType",info:"If ``True``, calculations will be run in serial, and work directories will be cleaned before the next step."}],outputs:[{name:"dos",required:!0,valid_types:"",info:""},{name:"nscf",required:!0,valid_types:"",info:""},{name:"projwfc",required:!0,valid_types:"",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:202,message:"Neither the `kpoints` nor the `kpoints_distance` input was specified for base or nscf namespaces."},{status:401,message:"the SCF sub process failed"},{status:402,message:"the NSCF sub process failed"},{status:403,message:"the DOS sub process failed"},{status:404,message:"the PROJWFC sub process failed"},{status:404,message:"both the DOS and PROJWFC sub process failed"}]},class:"aiida_quantumespresso.workflows.pdos:PdosWorkChain"},"quantumespresso.ph.base":{description:["Workchain to run a Quantum ESPRESSO ph.x calculation with automated error handling and restarts."],spec:{inputs:[{name:"ph",required:!0,valid_types:"Data",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"handler_overrides",required:!1,valid_types:"Dict, NoneType",info:"Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration."},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"only_initialization",required:!1,valid_types:"Bool",info:""}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:204,message:"The `metadata.options` did not specify both `resources.num_machines` and `max_wallclock_seconds`. This exit status has been deprecated as the check it corresponded to was incorrect."},{status:300,message:"The calculation failed with an unrecoverable error."},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:401,message:"The maximum number of iterations was exceeded."},{status:401,message:"The work chain failed to merge the q-points data from multiple `PhCalculation`s because not all q-points were parsed."},{status:402,message:"The process failed for an unknown reason, twice in a row."}]},class:"aiida_quantumespresso.workflows.ph.base:PhBaseWorkChain"},"quantumespresso.pw.bands":{description:["Workchain to compute a band structure for a given structure using Quantum ESPRESSO pw.x.",""," The logic for the computation of various parameters for the BANDS step is as follows:",""," Number of bands:"," One can specify the number of bands to be used in the BANDS step either directly through the input parameters"," `bands.pw.parameters.SYSTEM.nbnd` or through `nbands_factor`. Note that specifying both is not allowed. When"," neither is specified nothing will be set by the work chain and the default of Quantum ESPRESSO will end up being"," used. If the `nbands_factor` is specified the maximum value of the following values will be used:",""," * `nbnd` of the preceding SCF calculation"," * 0.5 * nelectrons * nbands_factor"," * 0.5 * nelectrons + 4",""," Kpoints:"," There are three options; specify either an existing `KpointsData` through `bands_kpoints`, or specify the"," `bands_kpoint_distance`, or specify neither. For the former those exact kpoints will be used for the BANDS step."," In the two other cases, the structure will first be normalized using SeekPath and the path along high-symmetry"," k-points will be generated on that structure. The distance between kpoints for the path will be equal to that"," of `bands_kpoints_distance` or the SeekPath default if not specified."],spec:{inputs:[{name:"bands",required:!0,valid_types:"Data",info:"Inputs for the `PwBaseWorkChain` for the BANDS calculation."},{name:"scf",required:!0,valid_types:"Data",info:"Inputs for the `PwBaseWorkChain` for the SCF calculation."},{name:"structure",required:!0,valid_types:"StructureData",info:"The inputs structure."},{name:"bands_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"Explicit kpoints to use for the BANDS calculation. Specify either this or `bands_kpoints_distance`."},{name:"bands_kpoints_distance",required:!1,valid_types:"Float, NoneType",info:"Minimum kpoints distance for the BANDS calculation. 
Specify either this or `bands_kpoints`."},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation will be cleaned at the end of execution."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"nbands_factor",required:!1,valid_types:"Float, NoneType",info:"The number of bands for the BANDS calculation is that used for the SCF multiplied by this factor."},{name:"relax",required:!1,valid_types:"Data",info:"Inputs for the `PwRelaxWorkChain`, if not specified at all, the relaxation step is skipped."}],outputs:[{name:"band_parameters",required:!0,valid_types:"Dict",info:"The output parameters of the BANDS `PwBaseWorkChain`."},{name:"band_structure",required:!0,valid_types:"BandsData",info:"The computed band structure."},{name:"scf_parameters",required:!0,valid_types:"Dict",info:"The output parameters of the SCF `PwBaseWorkChain`."},{name:"primitive_structure",required:!1,valid_types:"StructureData",info:"The normalized and primitivized structure for which the bands are computed."},{name:"seekpath_parameters",required:!1,valid_types:"Dict",info:"The parameters used in the SeeKpath call to normalize the input or relaxed structure."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:201,message:"Cannot specify both `nbands_factor` and `bands.pw.parameters.SYSTEM.nbnd`."},{status:202,message:"Cannot specify both `bands_kpoints` and `bands_kpoints_distance`."},{status:401,message:"The PwRelaxWorkChain sub process failed"},{status:402,message:"The scf PwBasexWorkChain sub process failed"},{status:403,message:"The bands PwBasexWorkChain sub process failed"}]},class:"aiida_quantumespresso.workflows.pw.bands:PwBandsWorkChain"},"quantumespresso.pw.base":{description:["Workchain to run a Quantum ESPRESSO pw.x calculation with automated error handling and restarts."],spec:{inputs:[{name:"pw",required:!0,valid_types:"Data",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"handler_overrides",required:!1,valid_types:"Dict, NoneType",info:"Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration."},{name:"kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"An explicit k-points list or mesh. Either this or `kpoints_distance` has to be provided."},{name:"kpoints_distance",required:!1,valid_types:"Float, NoneType",info:"The minimum desired distance in 1/Å between k-points in reciprocal space. The explicit k-points will be generated automatically by a calculation function based on the input structure."},{name:"kpoints_force_parity",required:!1,valid_types:"Bool, NoneType",info:"Optional input when constructing the k-points based on a desired `kpoints_distance`. 
Setting this to `True` will force the k-point mesh to have an even number of points along each lattice vector except for any non-periodic directions."},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"The `output_parameters` output node of the successful calculation."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"output_atomic_occupations",required:!1,valid_types:"Dict",info:""},{name:"output_band",required:!1,valid_types:"BandsData",info:"The `output_band` output node of the successful calculation if present."},{name:"output_kpoints",required:!1,valid_types:"KpointsData",info:""},{name:"output_structure",required:!1,valid_types:"StructureData",info:"The `output_structure` output node of the successful calculation if present."},{name:"output_trajectory",required:!1,valid_types:"TrajectoryData",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:201,message:"The explicit `pseudos` or `pseudo_family` could not be used to get the necessary pseudos."},{status:202,message:"Neither the `kpoints` nor the `kpoints_distance` input was specified."},{status:203,message:"Neither the `options` nor `automatic_parallelization` input was specified. This exit status has been deprecated as the check it corresponded to was incorrect."},{status:204,message:"The `metadata.options` did not specify both `resources.num_machines` and `max_wallclock_seconds`. 
This exit status has been deprecated as the check it corresponded to was incorrect."},{status:210,message:"Required key for `automatic_parallelization` was not specified.This exit status has been deprecated as the automatic parallellization feature was removed."},{status:211,message:"Unrecognized keys were specified for `automatic_parallelization`.This exit status has been deprecated as the automatic parallellization feature was removed."},{status:300,message:"The calculation failed with an unidentified unrecoverable error."},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:310,message:"The calculation failed with a known unrecoverable error."},{status:320,message:"The initialization calculation failed."},{status:401,message:"The maximum number of iterations was exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."},{status:501,message:"Then ionic minimization cycle converged but the thresholds are exceeded in the final SCF."},{status:710,message:"The electronic minimization cycle did not reach self-consistency, but `scf_must_converge` is `False` and/or `electron_maxstep` is 0."}]},class:"aiida_quantumespresso.workflows.pw.base:PwBaseWorkChain"},"quantumespresso.pw.relax":{description:["Workchain to relax a structure using Quantum ESPRESSO pw.x."],spec:{inputs:[{name:"base",required:!0,valid_types:"Data",info:"Inputs for the `PwBaseWorkChain` for the main relax loop."},{name:"structure",required:!0,valid_types:"StructureData",info:"The inputs structure."},{name:"base_final_scf",required:!1,valid_types:"Data",info:"Inputs for the `PwBaseWorkChain` for the final scf."},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation will be cleaned at the end of execution."},{name:"max_meta_convergence_iterations",required:!1,valid_types:"Int",info:"The maximum number of variable cell relax iterations in the meta convergence cycle."},{name:"meta_convergence",required:!1,valid_types:"Bool",info:"If `True` the workchain will perform a meta-convergence on the cell volume."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"volume_convergence",required:!1,valid_types:"Float",info:"The volume difference threshold between two consecutive meta convergence iterations."}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"The `output_parameters` output node of the successful calculation."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"output_atomic_occupations",required:!1,valid_types:"Dict",info:""},{name:"output_band",required:!1,valid_types:"BandsData",info:"The `output_band` output node of the successful calculation if present."},{name:"output_kpoints",required:!1,valid_types:"KpointsData",info:""},{name:"output_structure",required:!1,valid_types:"StructureData",info:"The successfully relaxed structure."},{name:"output_trajectory",required:!1,valid_types:"TrajectoryData",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:401,message:"the relax PwBaseWorkChain sub process failed"},{status:402,message:"the final scf PwBaseWorkChain sub process failed"}]},class:"aiida_quantumespresso.workflows.pw.relax:PwRelaxWorkChain"},"quantumespresso.q2r.base":{description:["Workchain to run a Quantum ESPRESSO q2r.x calculation with automated error handling and restarts."],spec:{inputs:[{name:"q2r",required:!0,valid_types:"Data",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"handler_overrides",required:!1,valid_types:"Dict, NoneType",info:"Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration."},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"force_constants",required:!0,valid_types:"ForceConstantsData",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:300,message:"The calculation failed with an unrecoverable error."},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:401,message:"The maximum number of iterations was exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."}]},class:"aiida_quantumespresso.workflows.q2r.base:Q2rBaseWorkChain"},"quantumespresso.xps":{description:["Workchain to compute X-ray photoelectron spectra (XPS) for a given structure.",""," The WorkChain itself firstly calls the PwRelaxWorkChain to relax the input structure if"," required. Then determines the input settings for each XPS calculation automatically using"," ``get_xspectra_structures()``. The input structures are generated from the standardized"," structure by converting each to a supercell with cell dimensions of at least 8.0 angstrom"," in each periodic dimension in order to sufficiently reduce the unphysical interaction"," of the core-hole with neighbouring images. The size of the minimum size requirement can be"," overriden by the user if required. Then the standard Delta-Self-Consistent-Field (ΔSCF)"," method is used to get the XPS binding energy. Finally, the XPS spectrum is calculated"," using the Voigt profile."],spec:{inputs:[{name:"ch_scf",required:!0,valid_types:"Data",info:"Input parameters for the basic xps workflow (core-hole SCF)."},{name:"core_hole_pseudos",required:!0,valid_types:"UpfData, UpfData",info:'Dynamic namespace for pairs of excited-state pseudopotentials for each absorbing element. Must use the mapping "{element}" : {Upf}".'},{name:"gipaw_pseudos",required:!0,valid_types:"UpfData, UpfData",info:'Dynamic namespace for pairs of ground-state pseudopotentials for each absorbing element. Must use the mapping "{element}" : {Upf}".'},{name:"structure",required:!0,valid_types:"StructureData",info:"Structure to be used for calculation."},{name:"abs_atom_marker",required:!1,valid_types:"Str",info:"The name for the Kind representing the absorbing atom in the structure. Will be used in all structures generated in ``get_xspectra_structures`` step."},{name:"calc_binding_energy",required:!1,valid_types:"Bool",info:"If `True`, run scf calculation for the supercell."},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculations will be cleaned at the end of execution."},{name:"core_hole_treatments",required:!1,valid_types:"Dict, NoneType",info:"Optional dictionary to set core-hole treatment to all elements present. The default full-core-hole treatment will be used if not specified."},{name:"correction_energies",required:!1,valid_types:"Dict, NoneType",info:"Optional dictionary to set the correction energy to all elements present. 
"},{name:"dry_run",required:!1,valid_types:"Bool, NoneType",info:"Terminate workchain steps before submitting calculations (test purposes only)."},{name:"elements_list",required:!1,valid_types:"List, NoneType",info:"The list of elements to be considered for analysis, each must be valid elements of the periodic table."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"relax",required:!1,valid_types:"Data",info:"Input parameters for the relax process. If not specified at all, the relaxation step is skipped."},{name:"spglib_settings",required:!1,valid_types:"Dict, NoneType",info:"Optional settings dictionary for the spglib call within ``get_xspectra_structures``."},{name:"structure_preparation_settings",required:!1,valid_types:"Dict, Float, Int, Bool, Str",info:"Optional settings dictionary for the ``get_xspectra_structures()`` method."},{name:"voight_gamma",required:!1,valid_types:"Float",info:"The gamma parameter for the Lorenzian broadening in the Voight method."},{name:"voight_sigma",required:!1,valid_types:"Float",info:"The sigma parameter for the gaussian broadening in the Voight method."}],outputs:[{name:"binding_energies",required:!0,valid_types:"Dict",info:"All the binding energy values for each element calculated by the WorkChain."},{name:"chemical_shifts",required:!0,valid_types:"Dict",info:"All the chemical shift values for each element calculated by the WorkChain."},{name:"final_spectra_be",required:!0,valid_types:"XyData",info:"The fully-resolved spectra for each element based on binding energy."},{name:"final_spectra_cls",required:!0,valid_types:"XyData",info:"The fully-resolved spectra for each element based on chemical shift."},{name:"output_parameters_ch_scf",required:!0,valid_types:"Dict",info:"The output parameters of each ``PwBaseWorkChain`` performed``."},{name:"supercell_structure",required:!0,valid_types:"StructureData",info:"The supercell of ``outputs.standardized_structure`` used to generate structures for XPS sub-processes."},{name:"symmetry_analysis_data",required:!0,valid_types:"Dict",info:"The output parameters from ``get_xspectra_structures()``."},{name:"optimized_structure",required:!1,valid_types:"StructureData",info:"The optimized structure from the ``relax`` process."},{name:"output_parameters_relax",required:!1,valid_types:"Dict",info:"The output_parameters of the relax step."},{name:"output_parameters_scf",required:!1,valid_types:"Dict",info:"The output_parameters of the scf step."},{name:"standardized_structure",required:!1,valid_types:"StructureData",info:"The standardized crystal structure used to generate structures for XPS sub-processes."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:401,message:"The Relax sub process failed"},{status:402,message:"The SCF Pw sub processes failed"},{status:402,message:"One or more CH_SCF Pw sub processes failed"}]},class:"aiida_quantumespresso.workflows.xps:XpsWorkChain"},"quantumespresso.xspectra.base":{description:["Workchain to run a Quantum ESPRESSO xspectra.x calculation with automated error handling and restarts."],spec:{inputs:[{name:"xspectra",required:!0,valid_types:"Data",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of 
execution."},{name:"handler_overrides",required:!1,valid_types:"Dict, NoneType",info:"Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration."},{name:"kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"An explicit k-points mesh. Either this or `kpoints_distance` has to be provided."},{name:"kpoints_distance",required:!1,valid_types:"Float, NoneType",info:"The minimum desired distance in 1/Å between k-points in reciprocal space. The explicit k-points will be generated automatically by a calculation function based on the input structure."},{name:"kpoints_force_parity",required:!1,valid_types:"Bool, NoneType",info:"Optional input when constructing the k-points based on a desired `kpoints_distance`. Setting this to `True` will force the k-point mesh to have an even number of points along each lattice vector except for any non-periodic directions."},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"spectra",required:!0,valid_types:"XyData",info:""},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:202,message:"Neither the `kpoints` nor the `kpoints_distance` input was specified."},{status:300,message:"The calculation failed with an unrecoverable error."},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:401,message:"The maximum number of iterations was exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."}]},class:"aiida_quantumespresso.workflows.xspectra.base:XspectraBaseWorkChain"},"quantumespresso.xspectra.core":{description:["Workchain to compute X-ray absorption spectra for a given structure using Quantum ESPRESSO.",""," The workflow follows the process required to compute the XAS of an input structure: an SCF calculation is performed"," using the provided structure, which is then followed by the calculation of the XAS itself by XSpectra. 
The"," calculations performed by the WorkChain in a typical run will be:",""," - PwSCF calculation with pw.x of the input structure with a core-hole present."," - Generation of core-wavefunction data with upf2plotcore.sh (if requested)."," - XAS calculation with xspectra.x to compute the Lanczos coefficients and print the XANES spectra for the"," polarisation vectors requested in the input."," - Collation of output data from pw.x and xspectra.x calculations, including a combination of XANES dipole spectra"," based on polarisation vectors to represent the powder spectrum of the structure (if requested).",""," If ``run_replot = True`` is set in the inputs (defaults to False), the WorkChain will run a second xspectra.x"," calculation which replots the spectra produced from the ``xs_prod`` step. This option can be very useful for"," obtaining a final spectrum at low levels of broadening (relative to the default of 0.5 eV), particularly as higher"," levels of broadening significantly speed up the convergence of the Lanczos procedure. Inputs for the replot"," calculation are found in the ``xs_plot`` namespace.",""," The core-wavefunction plot derived from the ground-state of the absorbing element can be provided as a top-level"," input or produced by the WorkChain. If left to the WorkChain, the ground-state pseudopotential assigned to the"," absorbing element will be used to generate this data using the upf2plotcore.sh utility script (via the"," ``aiida-shell`` plugin).",""," In its current stage of development, the workflow requires the following:",""," - An input structure where the desired absorbing atom in the system is marked as a separate Kind. The default"," behaviour for the WorkChain is to set the Kind name as 'X', however this can be changed via the `overrides`"," dictionary."," - A code node for ``upf2plotcore``, configured for the ``aiida-shell`` plugin"," (https://github.com/sphuber/aiida-shell). Alternatively, a ``SinglefileData`` node from a previous ``ShellJob``"," run can be supplied under ``inputs.core_wfc_data``."," - A suitable pair of pseudopotentials for the element type of the absorbing atom, one for the ground-state occupancy"," which contains GIPAW informtation for the core level of interest for the XAS (e.g. 1s in the case of a K-edge"," calculation) and the other containing a core hole. (For the moment this can be passed either via the"," ``core_hole_pseudos`` field in ``get_builder_from_protocol`` or via the overrides, but will be changed later once"," full families of core-hole pseudopotentials become available)."],spec:{inputs:[{name:"eps_vectors",required:!0,valid_types:"List",info:"The list of 3-vectors to use in XSpectra sub-processes. The number of sub-lists will subsequently define the number of XSpectra calculations to perform"},{name:"scf",required:!0,valid_types:"Data",info:"Input parameters for the `pw.x` calculation."},{name:"structure",required:!0,valid_types:"StructureData",info:"Structure to be used for calculation, with at least one site containing the `abs_atom_marker` as the kind label."},{name:"xs_prod",required:!0,valid_types:"Data",info:"Input parameters for the `xspectra.x` calculation to compute the Lanczos."},{name:"abs_atom_marker",required:!1,valid_types:"Str, NoneType",info:"The name for the Kind representing the absorbing atom in the structure. 
Must corespond to a Kind within the StructureData node supplied to the calculation."},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation will be cleaned at the end of execution."},{name:"core_wfc_data",required:!1,valid_types:"SinglefileData, NoneType",info:"The core wavefunction data file extracted from the ground-state pseudo for the absorbing atom."},{name:"dry_run",required:!1,valid_types:"Bool, NoneType",info:"Terminate workchain steps before submitting calculations (test purposes only)."},{name:"get_powder_spectrum",required:!1,valid_types:"Bool",info:"If `True`, the WorkChain will combine XANES dipole spectra computed using the XAS basis vectors defined according to the `get_powder_spectrum` CalcFunction."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"run_replot",required:!1,valid_types:"Bool",info:""},{name:"upf2plotcore_code",required:!1,valid_types:"Code, NoneType",info:"The code node required for upf2plotcore.sh configured for ``aiida-shell``. Must be provided if `core_wfc_data` is not provided."},{name:"xs_plot",required:!1,valid_types:"Data",info:"Input parameters for the re-plot `xspectra.x` calculation of the Lanczos."}],outputs:[{name:"parameters_scf",required:!0,valid_types:"Dict",info:"The output parameters of the SCF `PwBaseWorkChain`."},{name:"parameters_xspectra",required:!0,valid_types:"Dict",info:"The output dictionaries of each `XspectraBaseWorkChain` performed"},{name:"spectra",required:!0,valid_types:"XyData",info:"An XyData node containing all the final spectra produced by the WorkChain."},{name:"powder_spectrum",required:!1,valid_types:"XyData",info:"The simulated powder spectrum"}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:401,message:"The SCF sub process failed"},{status:402,message:"One or more XSpectra sub processes failed"},{status:403,message:"The pseudo for the absorbing element contains no GIPAW information."}]},class:"aiida_quantumespresso.workflows.xspectra.core:XspectraCoreWorkChain"},"quantumespresso.xspectra.crystal":{description:["Workchain to compute all X-ray absorption spectra for a given structure using Quantum ESPRESSO.",""," The WorkChain follows the process required to compute all the K-edge XAS spectra for each"," element in a given structure. The WorkChain itself firstly calls the PwRelaxWorkChain to"," relax the input structure, then determines the input settings for each XAS"," calculation automatically using ``get_xspectra_structures()``:",""," - Firstly the input structure is converted to its conventional standard cell using"," ``spglib`` and detects the space group number for the conventional cell."," - Symmetry analysis of the standardized structure using ``spglib`` is then used to"," determine the number of non-equivalent atomic sites in the structure for each"," element considered for analysis.",""," Using the symmetry data returned from ``get_xspectra_structures``, input structures for"," the XspectraCoreWorkChain are generated from the standardized structure by converting each"," to a supercell with cell dimensions of at least 8.0 angstroms in each periodic dimension -"," required in order to sufficiently reduce the unphysical interaction of the core-hole with"," neighbouring images. 
The size of the minimum size requirement can be overriden by the"," user if required. The WorkChain then uses the space group number to set the list of"," polarisation vectors for the ``XspectraCoreWorkChain`` to compute for all subsequent"," calculations."],spec:{inputs:[{name:"core",required:!0,valid_types:"Data",info:"Input parameters for the basic xspectra workflow (core-hole SCF + XAS."},{name:"core_hole_pseudos",required:!0,valid_types:"UpfData, UpfData",info:'Dynamic namespace for pairs of excited-state pseudopotentials for each absorbing element. Must use the mapping "{element}" : {Upf}".'},{name:"elements_list",required:!0,valid_types:"List",info:"The list of elements to be considered for analysis, each must be a valid element of the periodic table."},{name:"gipaw_pseudos",required:!0,valid_types:"UpfData, UpfData",info:'Dynamic namespace for pairs of ground-state pseudopotentials for each absorbing element. Must use the mapping "{element}" : {Upf}.'},{name:"structure",required:!0,valid_types:"StructureData",info:"Structure to be used for calculation."},{name:"abs_atom_marker",required:!1,valid_types:"Str",info:"The name for the Kind representing the absorbing atom in the structure. Will be used in all structures generated in ``get_xspectra_structures`` step."},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculations will be cleaned at the end of execution."},{name:"core_hole_treatments",required:!1,valid_types:"Dict, NoneType",info:"Optional dictionary to set core-hole treatment to given elements present. The default full-core-hole treatment will be used if not specified."},{name:"core_wfc_data",required:!1,valid_types:"SinglefileData",info:"Input namespace to provide core wavefunction inputs for each element. Must follow the format: ``core_wfc_data__{symbol} = {node}``"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"relax",required:!1,valid_types:"Data",info:"Input parameters for the relax process. 
If not specified at all, the relaxation step is skipped."},{name:"return_all_powder_spectra",required:!1,valid_types:"Bool",info:"If ``True``, the WorkChain will return all ``powder_spectrum`` nodes from each ``XspectraCoreWorkChain`` sub-process."},{name:"spglib_settings",required:!1,valid_types:"Dict, NoneType",info:"Optional settings dictionary for the spglib call within ``get_xspectra_structures``."},{name:"structure_preparation_settings",required:!1,valid_types:"Dict, Float, Int, Bool, Str",info:"Optional settings dictionary for the ``get_xspectra_structures()`` method."},{name:"upf2plotcore_code",required:!1,valid_types:"Code, NoneType",info:"Code node for the upf2plotcore.sh ShellJob code."}],outputs:[{name:"final_spectra",required:!0,valid_types:"XyData",info:"The fully-resolved spectra for each element"},{name:"supercell_structure",required:!0,valid_types:"StructureData",info:"The supercell of ``outputs.standardized_structure`` used to generate structures for XSpectra sub-processes."},{name:"symmetry_analysis_data",required:!0,valid_types:"Dict",info:"The output parameters from ``get_xspectra_structures()``."},{name:"optimized_structure",required:!1,valid_types:"StructureData",info:"The optimized structure from the ``relax`` process."},{name:"parameters_relax",required:!1,valid_types:"Dict",info:"The output_parameters of the relax step."},{name:"parameters_scf",required:!1,valid_types:"Dict",info:"The output parameters of each ``PwBaseWorkChain`` performed in each ``XspectraCoreWorkChain``."},{name:"parameters_xspectra",required:!1,valid_types:"Dict",info:"The output dictionaries of each `XspectraCalculation` performed"},{name:"powder_spectra",required:!1,valid_types:"XyData",info:"All the spectra generated by the WorkChain."},{name:"standardized_structure",required:!1,valid_types:"StructureData",info:"The standardized crystal structure used to generate structures for XSpectra sub-processes."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:401,message:"The Relax sub process failed"},{status:402,message:"One or more XSpectra workflows failed"},{status:403,message:"The pseudos for one or more absorbing elements contain no GIPAW information."}]},class:"aiida_quantumespresso.workflows.xspectra.crystal:XspectraCrystalWorkChain"}},console_scripts:{"aiida-quantumespresso":"aiida_quantumespresso.cli:cmd_root"}},commits_count:93,development_status:"stable",summaryinfo:[{colorclass:"blue",text:"Calculations",count:20},{colorclass:"brown",text:"Parsers",count:13},{colorclass:"red",text:"Data",count:2},{colorclass:"green",text:"Workflows",count:11},{colorclass:"purple",text:"Console scripts",count:1},{colorclass:"orange",text:"Other (Tools calculations, Tools data orbitals)",count:3}],pip_install_cmd:"pip install aiida-quantumespresso",is_installable:"True"},"aiida-quantumespresso-hp":{code_home:"https://github.com/sphuber/aiida-quantumespresso-hp",entry_point_prefix:"quantumespresso.hp",pip_url:"git+https://github.com/sphuber/aiida-quantumespresso-hp",name:"aiida-quantumespresso-hp",package_name:"aiida_quantumespresso_hp",hosted_on:"github.com",metadata:{author:"Sebastiaan P. 
Huber",author_email:"mail@sphuber.net",version:"0.1.0",description:"The AiiDA plugin for the Hubbard module of Quantum ESPRESSO",classifiers:["License :: OSI Approved :: MIT License","Programming Language :: Python :: 2.7","Development Status :: 4 - Beta"]},aiida_version:">=1.0.0b6,<2.0",entry_points:{"aiida.calculations":{"quantumespresso.hp":"aiida_quantumespresso_hp.calculations.hp:HpCalculation"},"aiida.parsers":{"quantumespresso.hp":"aiida_quantumespresso_hp.parsers.hp:HpParser"},"aiida.workflows":{"quantumespresso.hp.main":"aiida_quantumespresso_hp.workflows.hp.main:HpWorkChain","quantumespresso.hp.parallelize_atoms":"aiida_quantumespresso_hp.workflows.hp.parallelize_atoms:HpParallelizeAtomsWorkChain","quantumespresso.hp.base":"aiida_quantumespresso_hp.workflows.hp.base:HpBaseWorkChain","quantumespresso.hp.hubbard":"aiida_quantumespresso_hp.workflows.hubbard:SelfConsistentHubbardWorkChain"},console_scripts:{launch_calculation_hp:"aiida_quantumespresso_hp.cli.calculations.hp:launch",launch_workflow_hp_base:"aiida_quantumespresso_hp.cli.workflows.hp.base:launch",launch_workflow_hp_main:"aiida_quantumespresso_hp.cli.workflows.hp.main:launch",launch_workflow_hp_hubbard:"aiida_quantumespresso_hp.cli.workflows.hubbard:launch"}},commits_count:0,development_status:"beta",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"green",text:"Workflows",count:4},{colorclass:"purple",text:"Console scripts",count:4}],pip_install_cmd:"pip install git+https://github.com/sphuber/aiida-quantumespresso-hp",is_installable:"True"},"aiida-raspa":{code_home:"https://github.com/yakutovicha/aiida-raspa",entry_point_prefix:"raspa",pip_url:"aiida-raspa",name:"aiida-raspa",package_name:"aiida_raspa",hosted_on:"github.com",metadata:{description:"AiiDA plugin for RASPA code",author:"Aliaksandr Yakutovich",author_email:"aliaksandr.yakutovich@epfl.ch",license:"MIT License",home_page:"https://github.com/yakutovicha/aiida-raspa",classifiers:["Development Status :: 5 - Production/Stable","Framework :: AiiDA","License :: OSI Approved :: MIT License","Programming Language :: Python :: 3"],version:"1.2.0"},aiida_version:null,entry_points:{"aiida.calculations":{raspa:{description:["This is a RaspaCalculation, subclass of CalcJob, to prepare input for RASPA code."," For information on RASPA, refer to: https://github.com/iraspa/raspa2."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"parameters",required:!0,valid_types:"Dict",info:"Input parameters"},{name:"block_pocket",required:!1,valid_types:"SinglefileData",info:"Zeo++ block pocket file"},{name:"file",required:!1,valid_types:"SinglefileData",info:"Additional input file(s)"},{name:"framework",required:!1,valid_types:"CifData",info:"Input framework(s)"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"parent_folder",required:!1,valid_types:"RemoteData",info:"Remote folder used to continue the same simulation stating from the binary restarts."},{name:"retrieved_parent_folder",required:!1,valid_types:"FolderData",info:"To use an old calculation as a starting poing for a new one."},{name:"settings",required:!1,valid_types:"Dict",info:"Additional input parameters"}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"The results of a calculation"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder 
node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"warnings",required:!1,valid_types:"List",info:"Warnings that appeared during the calculation"}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The retrieved folder data node could not be accessed."},{status:101,message:"The retrieved folder does not contain an output file."},{status:102,message:'The output does not contain "Starting simulation".'},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:500,message:"The calculation could not be completed due to the lack of time."}]},class:"aiida_raspa.calculations:RaspaCalculation"}},"aiida.parsers":{raspa:"aiida_raspa.parsers:RaspaParser"},"aiida.workflows":{"raspa.base":{description:["Workchain to run a RASPA calculation with automated error handling and restarts."],spec:{inputs:[{name:"raspa",required:!0,valid_types:"",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"handler_overrides",required:!1,valid_types:"Dict",info:"Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated."},{name:"max_iterations",required:!1,valid_types:"Int",info:"Maximum number of iterations the work chain will restart the process to finish successfully."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"The results of a calculation"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"warnings",required:!1,valid_types:"List",info:"Warnings that appeared during the calculation"}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:401,message:"The maximum number of iterations was exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."}]},class:"aiida_raspa.workchains:RaspaBaseWorkChain"}}},commits_count:0,development_status:"stable",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install aiida-raspa",is_installable:"True"},"aiida-shell":{code_home:"https://github.com/sphuber/aiida-shell",entry_point_prefix:"core",pip_url:"aiida-shell",plugin_info:"https://raw.github.com/sphuber/aiida-shell/master/pyproject.toml",name:"aiida-shell",package_name:"aiida_shell",hosted_on:"github.com",metadata:{description:"AiiDA plugin that makes running shell commands easy.",author_email:'"Sebastiaan P. Huber" ',classifiers:["Development Status :: 3 - Alpha","Framework :: AiiDA","License :: OSI Approved :: MIT License","Operating System :: MacOS :: MacOS X","Operating System :: POSIX :: Linux","Programming Language :: Python","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering"],version:"0.5.3"},aiida_version:">=2.1,<3.0",entry_points:{"aiida.calculations":{"core.shell":{description:["Implementation of :class:`aiida.engine.CalcJob` to run a simple shell command."],spec:{inputs:[{name:"code",required:!0,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"arguments",required:!1,valid_types:"List, NoneType",info:""},{name:"filenames",required:!1,valid_types:"Dict, NoneType",info:""},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"nodes",required:!1,valid_types:"Data",info:""},{name:"outputs",required:!1,valid_types:"List, NoneType",info:""},{name:"parser",required:!1,valid_types:"PickledData, NoneType",info:""},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:300,message:"Exit status could not be determined: exit status file was not retrieved."},{status:301,message:"Exit status could not be determined: exit status file does not contain a valid integer."},{status:302,message:"The stdout file was not retrieved."},{status:303,message:"One or more output files defined in the `outputs` input were not retrieved: {missing_filepaths}."},{status:310,message:"Callable specified in the `parser` input excepted: {exception}."},{status:400,message:"The command exited with a non-zero status: {status} {stderr}."},{status:410,message:"The command exited with a zero status but the stderr was not empty."}]},class:"aiida_shell.calculations.shell:ShellJob"}},"aiida.data":{"core.code.installed.shell":"aiida_shell.data.code:ShellCode","core.pickled":"aiida_shell.data.pickled:PickledData"},"aiida.parsers":{"core.shell":"aiida_shell.parsers.shell:ShellParser"}},commits_count:46,development_status:"alpha",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"red",text:"Data",count:2}],pip_install_cmd:"pip install aiida-shell",is_installable:"True"},"aiida-siesta":{code_home:"https://github.com/siesta-project/aiida_siesta_plugin/tree/master",documentation_url:"https://aiida-siesta-plugin.readthedocs.io/",entry_point_prefix:"siesta",pip_url:"aiida-siesta",name:"aiida-siesta",package_name:"aiida_siesta",hosted_on:"github.com",metadata:{description:"A plugin for Siesta's basic functionality within the AiiDA framework.",author_email:'Albero Garcia , "Victor M. 
Garcia-Suarez" , Emanuele Bosoni , Vladimir Dikan , Pol Febrer ',classifiers:["Development Status :: 5 - Production/Stable","Framework :: AiiDA","License :: OSI Approved :: MIT License","Operating System :: MacOS :: MacOS X","Operating System :: POSIX :: Linux","Programming Language :: Python","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9"],version:"2.0.0"},aiida_version:">=2.0.0,<3.0.0",entry_points:{},commits_count:8,development_status:"stable",summaryinfo:[],pip_install_cmd:"pip install aiida-siesta",is_installable:"True"},"aiida-spex":{code_home:"https://github.com/JuDFTteam/aiida-spex",entry_point_prefix:"spex",pip_url:"git+https://github.com/JuDFTteam/aiida-spex",name:"aiida-spex",package_name:"aiida_spex",hosted_on:"github.com",metadata:{author:"The SPEX Team",author_email:"a.chandran@fz-juelich.de",version:"1.1.2",description:"AiiDA plugin for SPEX code",classifiers:["Programming Language :: Python","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Framework :: AiiDA"]},aiida_version:">=1.0.0b3,<3.0.0",entry_points:{"aiida.calculations":{"spex.spex":"aiida_spex.calculations.spex:SpexCalculation"},"aiida.data":{"spex.spexinp":"aiida_spex.data.spexinp:SpexinpData"},"aiida.parsers":{"spex.spexparser":"aiida_spex.parsers.spex:SpexParser"},"aiida.workflows":{"spex.job":"aiida_spex.workflows.job:SpexJobWorkchain"}},commits_count:0,development_status:"planning",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"red",text:"Data",count:1},{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install git+https://github.com/JuDFTteam/aiida-spex"},"aiida-spirit":{code_home:"https://github.com/JuDFTteam/aiida-spirit/tree/main",documentation_url:"https://aiida-spirit.readthedocs.io/",entry_point_prefix:"spirit",name:"aiida-spirit",pip_url:"aiida-spirit",package_name:"aiida_spirit",hosted_on:"github.com",metadata:{description:"AiiDA plugin for the spirit code",author:"The JuDFT Team",author_email:"p.ruessmann@fz-juelich.de",license:"MIT",home_page:"https://github.com/JuDFTteam/aiida-spirit",classifiers:["Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Programming Language :: Python"],version:"0.2.2"},aiida_version:null,entry_points:{"aiida.calculations":{spirit:"aiida_spirit.calculations:SpiritCalculation"},"aiida.parsers":{spirit:"aiida_spirit.parsers:SpiritParser"}},commits_count:9,development_status:"planning",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1}],pip_install_cmd:"pip install aiida-spirit"},"aiida-sshonly":{code_home:"https://github.com/adegomme/aiida-sshonly",development_status:"beta",entry_point_prefix:"sshonly",pip_url:"aiida-sshonly",plugin_info:"https://raw.github.com/adegomme/aiida-sshonly/master/setup.json",name:"aiida-sshonly",package_name:"aiida_sshonly",hosted_on:"github.com",metadata:{description:"AiiDA plugin adding a sshonly transport option, using only SSH to transfer files, avoiding SFTP, in case it's blocked or non functional on a remote system",author:"adegomme",license:"MIT",home_page:"https://github.com/adegomme/aiida-sshonly",classifiers:["Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Programming Language :: 
Python"],version:"0.1.0a3"},aiida_version:">=1.3.0,<2.0.0",entry_points:{"aiida.transports":{ssh_only:"aiida_sshonly.transports.sshonly:SshOnlyTransport"}},commits_count:0,summaryinfo:[{colorclass:"orange",text:"Other (Transports)",count:1}],pip_install_cmd:"pip install --pre aiida-sshonly",is_installable:"True"},"aiida-statefile-schedulers":{code_home:"https://github.com/dev-zero/aiida-statefile-schedulers",development_status:"beta",entry_point_prefix:"statefile_schedulers",pip_url:"aiida-statefile-schedulers",name:"aiida-statefile-schedulers",package_name:"aiida_statefile_schedulers",hosted_on:"github.com",metadata:{description:"Simple statefile-driven task schedulers for AiiDA",author:"Tiziano Müller",author_email:"tm@dev-zero.ch",license:"MIT",home_page:"https://github.com/dev-zero/aiida-statefile-schedulers",classifiers:["Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Programming Language :: Python"],version:"0.2.1"},aiida_version:null,entry_points:{"aiida.schedulers":{"statefile_schedulers.direct":"aiida_statefile_schedulers.schedulers.direct:StatefileDirectScheduler"}},commits_count:0,summaryinfo:[{colorclass:"orange",text:"Other (Schedulers)",count:1}],pip_install_cmd:"pip install aiida-statefile-schedulers",is_installable:"True"},"aiida-strain":{code_home:"https://github.com/greschd/aiida-strain",documentation_url:"https://aiida-strain.readthedocs.io",entry_point_prefix:"strain",pip_url:"aiida-strain",name:"aiida-strain",package_name:"aiida_strain",hosted_on:"github.com",metadata:{description:"AiiDA Plugin for applying strain to structures",author:"Dominik Gresch",author_email:"greschd@gmx.ch",license:"Apache 2.0",home_page:"https://aiida-strain.readthedocs.io",classifiers:["Development Status :: 3 - Alpha","Environment :: Plugins","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: Apache Software License","Programming Language :: Python :: 3.6","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Topic :: Scientific/Engineering :: Physics"],version:"0.2.0"},aiida_version:null,entry_points:{"aiida.workflows":{"strain.apply_strains":{description:["Workchain to create strained structures from a given input structure."],spec:{inputs:[{name:"strain_kind",required:!0,valid_types:"Str",info:""},{name:"strain_parameters",required:!0,valid_types:"Str",info:""},{name:"strain_strengths",required:!0,valid_types:"List",info:""},{name:"structure",required:!0,valid_types:"StructureData",info:""},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_strain:ApplyStrains"},"strain.apply_strains_with_symmetry":{description:["Workchain to create strained structures from an input structure, and select the symmetries which are compatible with the strained structure from a set of given input 
symmetries."],spec:{inputs:[{name:"strain_kind",required:!0,valid_types:"Str",info:""},{name:"strain_parameters",required:!0,valid_types:"Str",info:""},{name:"strain_strengths",required:!0,valid_types:"List",info:""},{name:"structure",required:!0,valid_types:"StructureData",info:""},{name:"symmetries",required:!0,valid_types:"SinglefileData",info:""},{name:"symmetry_repr_code",required:!0,valid_types:"Code",info:""},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_strain:ApplyStrainsWithSymmetry"}}},commits_count:0,development_status:"alpha",summaryinfo:[{colorclass:"green",text:"Workflows",count:2}],pip_install_cmd:"pip install aiida-strain",is_installable:"True"},"aiida-supercell":{code_home:"https://github.com/pzarabadip/aiida-supercell",development_status:"stable",documentation_url:"https://aiida-supercell.readthedocs.io/",entry_point_prefix:"supercell",pip_url:"git+https://github.com/pzarabadip/aiida-supercell",name:"aiida-supercell",package_name:"aiida_supercell",hosted_on:"github.com",metadata:{author:"Pezhman Zarabadi-Poor",author_email:"pzarabadip@gmail.com",version:"1.0.1",description:"AiiDA Plugin for Supercell program",classifiers:["Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9"]},aiida_version:">=1.0.0,<2.0",entry_points:{"aiida.calculations":{supercell:{description:["This is a SupercellCalculation, subclass of JobCalculation,"," to prepare input for enumerating structures using Supercell program"],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"structure",required:!0,valid_types:"StructureData, SinglefileData",info:"Input structure"},{name:"supercell_size",required:!0,valid_types:"List",info:"Supercell size for enumeration"},{name:"calculate_coulomb_energies",required:!1,valid_types:"Bool",info:"Whether to calculate Coulomb energies"},{name:"charge_balance_method",required:!1,valid_types:"Str",info:"Method to use for charge balancing"},{name:"charges",required:!1,valid_types:"Dict",info:"Dictionary of formal charges to be used"},{name:"merge_symmetric",required:!1,valid_types:"Bool",info:"Whether to merge symmetrically distinct configurations"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"random_seed",required:!1,valid_types:"Int",info:"Random seed number"},{name:"sample_structures",required:!1,valid_types:"Dict",info:"How to sample structures from huge configuration space"},{name:"save_as_archive",required:!1,valid_types:"Bool",info:"Whether to save resulting structures as archive"},{name:"tolerance",required:!1,valid_types:"Float",info:"The maximum distance (in Angstroms) between sites that should be contained within the same group."}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"the results of the calculation"},{name:"output_structures",required:!0,valid_types:"StructureData",info:"relaxed structure"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The retrieved folder data node could not be accessed."},{status:101,message:"Input structure could not be processed."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."}]},class:"aiida_supercell.calculations:SupercellCalculation"}},"aiida.parsers":{supercell:"aiida_supercell.parsers:SupercellParser"}},commits_count:0,summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1}],pip_install_cmd:"pip install git+https://github.com/pzarabadip/aiida-supercell",is_installable:"True"},"aiida-symmetry-representation":{code_home:"https://github.com/greschd/aiida_symmetry_representation",documentation_url:"https://aiida-symmetry-representation.readthedocs.io",entry_point_prefix:"symmetry_representation",pip_url:"aiida-symmetry-representation",name:"aiida-symmetry-representation",package_name:"aiida_symmetry_representation",hosted_on:"github.com",metadata:{description:"AiiDA Plugin for symmetry representations.",author:"Dominik Gresch",author_email:"greschd@gmx.ch",license:"Apache 2.0",home_page:"https://aiida-symmetry-representation.readthedocs.io",classifiers:["Development Status :: 5 - Production/Stable","Environment :: Plugins","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: Apache Software License","Programming Language :: Python :: 3","Programming Language :: Python :: 3.6","Programming Language :: Python :: 3.7","Topic :: Scientific/Engineering :: Physics"],version:"0.2.0"},aiida_version:null,entry_points:{"aiida.calculations":{"symmetry_representation.filter_symmetries":{description:["Calculation class to run the ``symmetry-repr filter_symmetries`` command."],spec:{inputs:[{name:"structure",required:!0,valid_types:"StructureData",info:"Structure with which the filtered symmetries should be compatible."},{name:"symmetries",required:!0,valid_types:"SinglefileData",info:"File containing the symmetries (in HDF5 format)."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"symmetries",required:!0,valid_types:"SinglefileData",info:"The HDF5 file containing the symmetries which are compatible with the structure."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"}]},class:"aiida_symmetry_representation.calculations.filter_symmetries:FilterSymmetriesCalculation"}},"aiida.parsers":{"symmetry_representation.symmetry":"aiida_symmetry_representation.parsers.symmetries:SymmetriesParser"}},commits_count:0,development_status:"stable",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1}],pip_install_cmd:"pip install aiida-symmetry-representation",is_installable:"True"},"aiida-tbextraction":{code_home:"https://github.com/greschd/aiida-tbextraction",documentation_url:"https://aiida-tbextraction.readthedocs.io/",entry_point_prefix:"tbextraction",pip_url:"aiida-tbextraction",name:"aiida-tbextraction",package_name:"aiida_tbextraction",hosted_on:"github.com",metadata:{description:"AiiDA Plugin for extracting tight-binding models",author:"Dominik Gresch",author_email:"greschd@gmx.ch",license:"Apache 2.0",home_page:"https://aiida-tbextraction.readthedocs.io",classifiers:["Development Status :: 4 - Beta","Environment :: Plugins","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: Apache Software License","Programming Language :: Python :: 3.6","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Topic :: Scientific/Engineering :: 
Physics"],version:"0.2.0b1"},aiida_version:null,entry_points:{"aiida.workflows":{"tbextraction.fp_run.base":"aiida_tbextraction.fp_run:FirstPrinciplesRunBase","tbextraction.fp_run.reference_bands.base":"aiida_tbextraction.fp_run.reference_bands:ReferenceBandsBase","tbextraction.fp_run.wannier_input.base":"aiida_tbextraction.fp_run.wannier_input:WannierInputBase","tbextraction.calculate_tb":"aiida_tbextraction.calculate_tb:TightBindingCalculation","tbextraction.model_evaluation.base":"aiida_tbextraction.model_evaluation:ModelEvaluationBase","tbextraction.model_evaluation.band_difference":"aiida_tbextraction.model_evaluation:BandDifferenceModelEvaluation","tbextraction.energy_windows.run_window":"aiida_tbextraction.energy_windows.run_window:RunWindow","tbextraction.energy_windows.window_search":"aiida_tbextraction.energy_windows.window_search:WindowSearch","tbextraction.optimize_fp_tb":"aiida_tbextraction.optimize_fp_tb:OptimizeFirstPrinciplesTightBinding","tbextraction.optimize_strained_fp_tb":"aiida_tbextraction.optimize_strained_fp_tb:OptimizeStrainedFirstPrinciplesTightBinding"}},commits_count:0,development_status:"beta",summaryinfo:[{colorclass:"green",text:"Workflows",count:10}],pip_install_cmd:"pip install --pre aiida-tbextraction",is_installable:"True"},"aiida-tbmodels":{code_home:"https://github.com/greschd/aiida-tbmodels",documentation_url:"https://aiida-tbmodels.readthedocs.io",entry_point_prefix:"tbmodels",pip_url:"aiida-tbmodels",name:"aiida-tbmodels",package_name:"aiida_tbmodels",hosted_on:"github.com",metadata:{description:"AiiDA Plugin for running TBmodels",author:"Dominik Gresch",author_email:"greschd@gmx.ch",license:"Apache 2.0",home_page:"https://aiida-tbmodels.readthedocs.io",classifiers:["Development Status :: 3 - Alpha","Environment :: Plugins","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: Apache Software License","Programming Language :: Python :: 3","Programming Language :: Python :: 3.6","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Topic :: Scientific/Engineering :: Physics"],version:"0.3.0"},aiida_version:null,entry_points:{"aiida.calculations":{"tbmodels.eigenvals":{description:["Calculation class for the 'tbmodels eigenvals' command, which computes the eigenvalues from a given tight-binding model."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"kpoints",required:!0,valid_types:"KpointsData",info:"Kpoints for which the eigenvalues are calculated."},{name:"tb_model",required:!0,valid_types:"SinglefileData",info:"Input model in TBmodels HDF5 format."},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"bands",required:!0,valid_types:"BandsData",info:"The calculated eigenvalues of the model at given k-points."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:300,message:"The result HDF5 file was not found."},{status:301,message:"The standard error file contains an unknown TBmodels exception."}]},class:"aiida_tbmodels.calculations.eigenvals:EigenvalsCalculation"},"tbmodels.parse":{description:["Calculation plugin for the 'tbmodels parse' command, which creates a"," TBmodels tight-binding model from the Wannier90 output."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"wannier_folder",required:!0,valid_types:"FolderData",info:"Folder containing the Wannier90 output data."},{name:"distance_ratio_threshold",required:!1,valid_types:"Float",info:"Determines the minimum ratio between nearest and next-nearest atom when parsing with 'nearest_atom' mode."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"pos_kind",required:!1,valid_types:"Str",info:"Determines how the orbital positions are parsed."},{name:"sparsity",required:!1,valid_types:"Str",info:"Set the sparsity of the output model. Requires TBmodels version >=1.4."}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"tb_model",required:!0,valid_types:"SinglefileData",info:"Output model in TBmodels HDF5 format."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:300,message:"The output model HDF5 file was not found."},{status:301,message:"The standard error file contains an unknown TBmodels exception."},{status:301,message:"The seedname_wsvec.dat file is empty or incomplete."},{status:401,message:"The nearest atom to use for position parsing is ambiguous."}]},class:"aiida_tbmodels.calculations.parse:ParseCalculation"},"tbmodels.slice":{description:["Calculation plugin for the 'tbmodels slice' command, which re-orders or slices orbitals of a tight-binding model."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"slice_idx",required:!0,valid_types:"List",info:"Indices of the orbitals which are sliced from the model."},{name:"tb_model",required:!0,valid_types:"SinglefileData",info:"Input model in TBmodels HDF5 format."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"sparsity",required:!1,valid_types:"Str",info:"Set the sparsity of the output model. Requires TBmodels version >=1.4."}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"tb_model",required:!0,valid_types:"SinglefileData",info:"Output model in TBmodels HDF5 format."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:300,message:"The output model HDF5 file was not found."},{status:301,message:"The standard error file contains an unknown TBmodels exception."}]},class:"aiida_tbmodels.calculations.slice:SliceCalculation"},"tbmodels.symmetrize":{description:["Calculation class for the 'tbmodels symmetrize' command, which creates a symmetrized tight-binding model from a tight-binding model and symmetry representations."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"symmetries",required:!0,valid_types:"SinglefileData",info:"File containing the symmetries in HDF5 format."},{name:"tb_model",required:!0,valid_types:"SinglefileData",info:"Input model in TBmodels HDF5 format."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"sparsity",required:!1,valid_types:"Str",info:"Set the sparsity of the output model. Requires TBmodels version >=1.4."}],outputs:[{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"tb_model",required:!0,valid_types:"SinglefileData",info:"Output model in TBmodels HDF5 format."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:300,message:"The output model HDF5 file was not found."},{status:301,message:"The standard error file contains an unknown TBmodels exception."},{status:301,message:"The type of the given symmetries object is incorrect."}]},class:"aiida_tbmodels.calculations.symmetrize:SymmetrizeCalculation"}},"aiida.parsers":{"tbmodels.model":"aiida_tbmodels.parsers.model:ModelParser"}},commits_count:0,development_status:"alpha",summaryinfo:[{colorclass:"blue",text:"Calculations",count:4},{colorclass:"brown",text:"Parsers",count:1}],pip_install_cmd:"pip install aiida-tbmodels",is_installable:"True"},"aiida-tcod":{code_home:"https://github.com/aiidateam/aiida-tcod",development_status:"beta",entry_point_prefix:"tcod",pip_url:"git+https://github.com/aiidateam/aiida-tcod",name:"aiida-tcod",package_name:"aiida_tcod",hosted_on:"github.com",metadata:{author:"The AiiDA team",author_email:"developers@aiida.net",version:"0.1.0a0",description:"AiiDA plugin to interact with the TCOD",classifiers:["Programming Language :: Python"]},aiida_version:">=1.0.0b1",entry_points:{"aiida.tools.dbexporters":{tcod:"aiida.tools.dbexporters.tcod"}},commits_count:0,summaryinfo:[{colorclass:"orange",text:"Other (Database Exporters)",count:1}],pip_install_cmd:"pip install git+https://github.com/aiidateam/aiida-tcod",is_installable:"True"},"aiida-uppasd":{code_home:"https://github.com/uppasd/aiida-uppasd",documentation_url:"https://github.com/uppasd/aiida-uppasd/blob/master/README.md",entry_point_prefix:"uppasd",pip_url:"git+https://github.com/unkcpz/aiida-uppasd",name:"aiida-uppasd",package_name:"aiida_uppasd",hosted_on:"github.com",metadata:{author:"Qichen Xu, Anders Bergman, Anna Delin, Jonathan Chico",author_email:"qichenx@kth.se",version:"0.1.0",description:"Interface for UppASD and AiiDA",classifiers:["Programming Language :: Python","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Framework :: AiiDA"]},aiida_version:">=1.1.0,<2.0.0",entry_points:{"aiida.calculations":{UppASD_core_calculations:"UppASD_AiiDA.calculations.core_calcs:UppASD"},"aiida.parsers":{UppASD_core_parsers:"UppASD_AiiDA.parsers.core_parser:SpinDynamic_core_parser"}},commits_count:0,development_status:"planning",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1}],pip_install_cmd:"pip install 
git+https://github.com/unkcpz/aiida-uppasd"},"aiida-vasp":{code_home:"https://github.com/aiida-vasp/aiida-vasp",documentation_url:"https://aiida-vasp.readthedocs.io/",entry_point_prefix:"vasp",pip_url:"aiida-vasp",plugin_info:"https://raw.githubusercontent.com/aiida-vasp/aiida-vasp/master/setup.json",name:"aiida-vasp",package_name:"aiida_vasp",hosted_on:"github.com",metadata:{description:"AiiDA plugin for running VASP calculations and workflows.",author_email:"Espen Flage-Larsen ",classifiers:["Development Status :: 5 - Production/Stable","Environment :: Plugins","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Programming Language :: Python","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering :: Chemistry","Topic :: Scientific/Engineering :: Physics"],version:"3.0.1"},aiida_version:">=2.4,<3.0",entry_points:{"aiida.calculations":{"vasp.immigrant":{description:["Parse VASP output objects stored in a specified directory.",""," Simulate running the VaspCalculation up to the point where it can be"," retrieved and parsed, then hand over control to the runner for the rest.",""," Usage examples"," --------------"," Immigrant calculation can be perfomed as follows.",""," ::",""," code = Code.get_from_string('vasp@local')"," folder = '/home/username/vasp-calc-dir'"," settings = {'parser_settings': {'add_energies': True,"," 'add_forces': True,"," 'electronic_step_energies': True}}"," VaspImmigrant = CalculationFactory('vasp.immigrant')"," builder = VaspImmigrant.get_builder_from_folder(code,"," folder,"," settings=settings)"," submit(builder)",""," Instead of ``builder``, inputs dict is obtained similarly as",""," ::",""," code = Code.get_from_string('vasp@local')"," folder = '/home/username/vasp-calc-dir'"," settings = {'parser_settings': {'add_energies': True,"," 'add_forces': True,"," 'electronic_step_energies': True}}"," VaspImmigrant = CalculationFactory('vasp.immigrant')"," inputs = VaspImmigrant.get_inputs_from_folder(code,"," folder,"," settings=settings)"," submit(VaspImmigrant, **inputs)",""," Note"," ----"," The defaul metadata is set automatically as follows::",""," {'options': {'max_wallclock_seconds': 1,"," 'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 1}}}",""," Specific scheduler may require setting ``resources`` differently"," (e.g., sge ``'parallel_env'``).",""," ``get_inputs_from_folder`` and ``get_builder_from_folder`` accept several"," kwargs, see the docstring of ``get_inputs_from_folder``."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"kpoints",required:!0,valid_types:"KpointsData",info:"The kpoints to use (KPOINTS)."},{name:"parameters",required:!0,valid_types:"Dict",info:"The VASP input parameters (INCAR)."},{name:"potential",required:!0,valid_types:"PotcarData",info:"The potentials (POTCAR)."},{name:"structure",required:!0,valid_types:"StructureData, CifData",info:"The input structure (POSCAR)."},{name:"charge_density",required:!1,valid_types:"ChargedensityData",info:"The charge density. (CHGCAR)"},{name:"dynamics",required:!1,valid_types:"Dict",info:"The VASP parameters related to ionic dynamics, e.g. 
flags to set the selective dynamics"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"remote_workdir",required:!1,valid_types:"str",info:""},{name:"restart_folder",required:!1,valid_types:"RemoteData",info:"A remote folder to restart from if need be"},{name:"settings",required:!1,valid_types:"Dict",info:"Additional parameters not related to VASP itself."},{name:"wavefunctions",required:!1,valid_types:"WavefunData",info:"The wave function coefficients. (WAVECAR)"}],outputs:[{name:"custom_outputs",required:!0,valid_types:"",info:""},{name:"misc",required:!0,valid_types:"Dict",info:"The output parameters containing smaller quantities that do not depend on system size."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"bands",required:!1,valid_types:"BandsData",info:"The output band structure."},{name:"born_charges",required:!1,valid_types:"ArrayData",info:"The output Born effective charges."},{name:"charge_density",required:!1,valid_types:"ArrayData",info:"The output charge density."},{name:"chgcar",required:!1,valid_types:"ChargedensityData",info:"The output charge density CHGCAR file."},{name:"dielectrics",required:!1,valid_types:"ArrayData",info:"The output dielectric functions."},{name:"dos",required:!1,valid_types:"ArrayData",info:"The output dos."},{name:"dynmat",required:!1,valid_types:"ArrayData",info:"The output dynamical matrix."},{name:"energies",required:!1,valid_types:"ArrayData",info:"The output total energies."},{name:"forces",required:!1,valid_types:"ArrayData",info:"The output forces."},{name:"hessian",required:!1,valid_types:"ArrayData",info:"The output Hessian matrix."},{name:"kpoints",required:!1,valid_types:"KpointsData",info:"The output k-points."},{name:"magnetization_density",required:!1,valid_types:"ArrayData",info:"The output magnetization density."},{name:"projectors",required:!1,valid_types:"ArrayData",info:"The output projectors of decomposition."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"site_magnetization",required:!1,valid_types:"Dict",info:"The output of the site magnetization"},{name:"stress",required:!1,valid_types:"ArrayData",info:"The output stress."},{name:"structure",required:!1,valid_types:"StructureData",info:"The output structure."},{name:"trajectory",required:!1,valid_types:"TrajectoryData",info:"The output trajectory data."},{name:"wavecar",required:!1,valid_types:"WavefunData",info:"The output plane wave coefficients file."}],exit_codes:[{status:0,message:"the sun is shining"},{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:333,message:"VASP did not produce any output and did likely not execute properly."},{status:350,message:"the retrieved folder data node could not be accessed."},{status:351,message:"the 
retrieved_temporary folder data node could not be accessed."},{status:352,message:"an object that is marked by the parser as critical is missing."},{status:700,message:"Calculation did not reach the end of execution."},{status:701,message:"The electronic structure is not converged."},{status:702,message:"The ionic relaxation is not converged."},{status:703,message:"VASP calculation encountered a critical error: {error_message}."},{status:704,message:"Outputs for diagnosis are missing, please make sure `run_status` and `notifications` quantities are requested for parsing."},{status:1001,message:"parsing an object has failed."},{status:1002,message:"the parser is not able to parse the {quantity} quantity"},{status:1003,message:"the vasprun.xml was truncated and recovery parsing failed to parse at least one of the requested quantities: {quantities}, very likely the VASP calculation did not run properly"},{status:1004,message:"the parser is not able to compose one or more output nodes: {nodes}"},{status:1005,message:"Overflow detected in XML while parsing."}]},class:"aiida_vasp.calcs.immigrant:VaspImmigrant"},"vasp.neb":{description:["NEB calculations using VASP",""," ------------------------------------"," Calculations for performing NEB calculations."," NEB calculations requires standard VASP inputs, but POSCAR are placed in"," folder names 00, 01, 02... N for N-1 number of images.",""," Input frames should be placed under the ``neb_images`` input namespace as a dictionary like::"," {"," 'image_00': structure_1,"," 'image_01': structure_2"," ...."," }",""," Output of individual frames are placed in the corresponding namespace under the same convention."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"final_structure",required:!0,valid_types:"StructureData, CifData",info:"The input structure (POSCAR) for the final image."},{name:"initial_structure",required:!0,valid_types:"StructureData, CifData",info:"The input structure (POSCAR) for initial image."},{name:"kpoints",required:!0,valid_types:"KpointsData",info:"The kpoints to use (KPOINTS)."},{name:"neb_images",required:!0,valid_types:"StructureData, CifData",info:"Starting structure for the NEB images"},{name:"parameters",required:!0,valid_types:"Dict",info:"The VASP input parameters (INCAR)."},{name:"potential",required:!0,valid_types:"PotcarData",info:"The potentials (POTCAR)."},{name:"charge_density",required:!1,valid_types:"ChargedensityData",info:"The charge density. (CHGCAR)"},{name:"dynamics",required:!1,valid_types:"Dict",info:"The VASP parameters related to ionic dynamics, e.g. flags to set the selective dynamics"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"restart_folder",required:!1,valid_types:"RemoteData",info:"A remote folder to restart from if need be"},{name:"settings",required:!1,valid_types:"Dict",info:"Additional parameters not related to VASP itself."},{name:"wavefunctions",required:!1,valid_types:"WavefunData",info:"The wave function coefficients. 
(WAVECAR)"}],outputs:[{name:"custom_outputs",required:!0,valid_types:"",info:""},{name:"misc",required:!0,valid_types:"Dict",info:"Per-image misc output."},{name:"neb_misc",required:!0,valid_types:"Dict",info:"NEB related data combined for each image"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"structure",required:!0,valid_types:"StructureData",info:"NEB images"},{name:"bands",required:!1,valid_types:"BandsData",info:"The output band structure."},{name:"born_charges",required:!1,valid_types:"ArrayData",info:"The output Born effective charges."},{name:"charge_density",required:!1,valid_types:"ArrayData",info:"The output charge density."},{name:"chgcar",required:!1,valid_types:"ChargedensityData",info:"The output charge density."},{name:"dielectrics",required:!1,valid_types:"ArrayData",info:"The output dielectric functions."},{name:"dos",required:!1,valid_types:"ArrayData",info:"The output dos."},{name:"dynmat",required:!1,valid_types:"ArrayData",info:"The output dynamical matrix."},{name:"energies",required:!1,valid_types:"ArrayData",info:"The output total energies."},{name:"forces",required:!1,valid_types:"ArrayData",info:"The output forces."},{name:"hessian",required:!1,valid_types:"ArrayData",info:"The output Hessian matrix."},{name:"kpoints",required:!1,valid_types:"KpointsData",info:"Kpoints for each image."},{name:"magnetization_density",required:!1,valid_types:"ArrayData",info:"The output magnetization density."},{name:"projectors",required:!1,valid_types:"ArrayData",info:"The output projectors of decomposition."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"site_magnetization",required:!1,valid_types:"Dict",info:"The output of the site magnetization for each image."},{name:"stress",required:!1,valid_types:"ArrayData",info:"The output stress."},{name:"trajectory",required:!1,valid_types:"TrajectoryData",info:"The output trajectory data."},{name:"wavecar",required:!1,valid_types:"WavefunData",info:"The output file containing the plane wave coefficients."}],exit_codes:[{status:0,message:"the sun is shining"},{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:333,message:"VASP did not produce any output files and did likely not execute properly."},{status:350,message:"the retrieved folder data node could not be accessed."},{status:351,message:"the retrieved_temporary folder data node could not be accessed."},{status:352,message:"an object that is marked by the parser as critical is missing."},{status:352,message:"a file that is marked by the parser as critical is missing."},{status:700,message:"Calculation did not reach the end of execution."},{status:701,message:"The electronic structure is not converged."},{status:702,message:"The ionic relaxation is not 
converged."},{status:703,message:"VASP calculation encountered a critical error: {error_message}."},{status:704,message:"Outputs for diagnosis are missing, please make sure the `neb_data` and `run_status` quantities are requested for parsing."},{status:1001,message:"parsing an object has failed."},{status:1001,message:"parsing a file has failed."},{status:1002,message:"the parser is not able to parse the {quantity} quantity"},{status:1003,message:"the vasprun.xml was truncated and recovery parsing failed to parse at least one of the requested quantities: {quantities}, very likely the VASP calculation did not run properly"},{status:1004,message:"the parser is not able to compose one or more output nodes: {nodes}"},{status:1005,message:"Overflow detected in XML while parsing."}]},class:"aiida_vasp.calcs.neb:VaspNEBCalculation"},"vasp.vasp":{description:["General-purpose VASP calculation.",""," ---------------------------------"," By default retrieves only the 'OUTCAR', 'vasprun.xml', 'EIGENVAL', 'DOSCAR'"," and Wannier90 input / output objects. These objects are deleted after parsing."," Additional retrieve objects can be specified via the"," ``settings['ADDITIONAL_RETRIEVE_TEMPORARY_LIST']`` input. In addition, if you want to keep"," any objects after parsing, put them in ``settings['ADDITIONAL_RETRIEVE_LIST']`` which is empty"," by default.",""," Floating point precision for writing POSCAR objects can be adjusted using"," ``settings['poscar_precision']``, default: 10",""," The following assumes you are familiar with the AiiDA data structures and"," how to set up and run an AiiDA calculation in general.",""," Example usage::",""," from aiida.orm import CalculationFactory, DataFactory"," from aiida.work import submit",""," proc = CalculationFactory('vasp.vasp').process()"," inputs = proc.get_inputs_template()"," inputs.parameter = "," inputs.structure = "," inputs.kpoints = "," inputs.settings = "," inputs.potential = DataFactory('vasp.potcar').get_potcars_from_structure(structure, ...)"," inputs.code = ",""," submit(proc, **inputs)",""," Which is very similar to the workchain example.",""," Since we do not want the content parsers to know about the AiiDA infrastructure,"," i.e. processes etc. we have no access to the exit codes defined on the CalcJob."," We thus have to deal with failures in parsing directly in the write calls here."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"kpoints",required:!0,valid_types:"KpointsData",info:"The kpoints to use (KPOINTS)."},{name:"parameters",required:!0,valid_types:"Dict",info:"The VASP input parameters (INCAR)."},{name:"potential",required:!0,valid_types:"PotcarData",info:"The potentials (POTCAR)."},{name:"structure",required:!0,valid_types:"StructureData, CifData",info:"The input structure (POSCAR)."},{name:"charge_density",required:!1,valid_types:"ChargedensityData",info:"The charge density. (CHGCAR)"},{name:"dynamics",required:!1,valid_types:"Dict",info:"The VASP parameters related to ionic dynamics, e.g. flags to set the selective dynamics"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"restart_folder",required:!1,valid_types:"RemoteData",info:"A remote folder to restart from if need be"},{name:"settings",required:!1,valid_types:"Dict",info:"Additional parameters not related to VASP itself."},{name:"wavefunctions",required:!1,valid_types:"WavefunData",info:"The wave function coefficients. 
(WAVECAR)"}],outputs:[{name:"custom_outputs",required:!0,valid_types:"",info:""},{name:"misc",required:!0,valid_types:"Dict",info:"The output parameters containing smaller quantities that do not depend on system size."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"bands",required:!1,valid_types:"BandsData",info:"The output band structure."},{name:"born_charges",required:!1,valid_types:"ArrayData",info:"The output Born effective charges."},{name:"charge_density",required:!1,valid_types:"ArrayData",info:"The output charge density."},{name:"chgcar",required:!1,valid_types:"ChargedensityData",info:"The output charge density CHGCAR file."},{name:"dielectrics",required:!1,valid_types:"ArrayData",info:"The output dielectric functions."},{name:"dos",required:!1,valid_types:"ArrayData",info:"The output dos."},{name:"dynmat",required:!1,valid_types:"ArrayData",info:"The output dynamical matrix."},{name:"energies",required:!1,valid_types:"ArrayData",info:"The output total energies."},{name:"forces",required:!1,valid_types:"ArrayData",info:"The output forces."},{name:"hessian",required:!1,valid_types:"ArrayData",info:"The output Hessian matrix."},{name:"kpoints",required:!1,valid_types:"KpointsData",info:"The output k-points."},{name:"magnetization_density",required:!1,valid_types:"ArrayData",info:"The output magnetization density."},{name:"projectors",required:!1,valid_types:"ArrayData",info:"The output projectors of decomposition."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"site_magnetization",required:!1,valid_types:"Dict",info:"The output of the site magnetization"},{name:"stress",required:!1,valid_types:"ArrayData",info:"The output stress."},{name:"structure",required:!1,valid_types:"StructureData",info:"The output structure."},{name:"trajectory",required:!1,valid_types:"TrajectoryData",info:"The output trajectory data."},{name:"wavecar",required:!1,valid_types:"WavefunData",info:"The output plane wave coefficients file."}],exit_codes:[{status:0,message:"the sun is shining"},{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:333,message:"VASP did not produce any output and did likely not execute properly."},{status:350,message:"the retrieved folder data node could not be accessed."},{status:351,message:"the retrieved_temporary folder data node could not be accessed."},{status:352,message:"an object that is marked by the parser as critical is missing."},{status:700,message:"Calculation did not reach the end of execution."},{status:701,message:"The electronic structure is not converged."},{status:702,message:"The ionic relaxation is not converged."},{status:703,message:"VASP calculation encountered a critical error: {error_message}."},{status:704,message:"Outputs for diagnosis are 
missing, please make sure `run_status` and `notifications` quantities are requested for parsing."},{status:1001,message:"parsing an object has failed."},{status:1002,message:"the parser is not able to parse the {quantity} quantity"},{status:1003,message:"the vasprun.xml was truncated and recovery parsing failed to parse at least one of the requested quantities: {quantities}, very likely the VASP calculation did not run properly"},{status:1004,message:"the parser is not able to compose one or more output nodes: {nodes}"},{status:1005,message:"Overflow detected in XML while parsing."}]},class:"aiida_vasp.calcs.vasp:VaspCalculation"},"vasp.vasp2w90":"aiida_vasp.calcs.vasp2w90:Vasp2w90Calculation"},"aiida.cmdline.data":{"vasp-potcar":"aiida_vasp.commands.potcar:potcar"},"aiida.data":{"vasp.archive":"aiida_vasp.data.archive:ArchiveData","vasp.chargedensity":"aiida_vasp.data.chargedensity:ChargedensityData","vasp.potcar":"aiida_vasp.data.potcar:PotcarData","vasp.potcar_file":"aiida_vasp.data.potcar:PotcarFileData","vasp.wavefun":"aiida_vasp.data.wavefun:WavefunData"},"aiida.groups":{"vasp.potcar":"aiida_vasp.data.potcar:PotcarGroup"},"aiida.parsers":{"vasp.neb":"aiida_vasp.parsers.neb:VtstNebParser","vasp.vasp":"aiida_vasp.parsers.vasp:VaspParser","vasp.vasp2w90":"aiida_vasp.parsers.vasp2w90:Vasp2w90Parser"},"aiida.workflows":{"vasp.bands":{description:["Extract the band structure using k-point paths fetched from SeeKpath."],spec:{inputs:[{name:"bands",required:!0,valid_types:"",info:""},{name:"code",required:!0,valid_types:"Code",info:""},{name:"dynamics",required:!0,valid_types:"",info:""},{name:"options",required:!0,valid_types:"Dict",info:""},{name:"potential_family",required:!0,valid_types:"Str",info:""},{name:"potential_mapping",required:!0,valid_types:"Dict",info:""},{name:"restart_folder",required:!0,valid_types:"RemoteData",info:` + The folder to restart in, which contains the outputs from the prerun to extract the charge density. + `},{name:"smearing",required:!0,valid_types:"",info:""},{name:"structure",required:!0,valid_types:"StructureData, CifData",info:""},{name:"chgcar",required:!1,valid_types:"ChargedensityData",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:` + If True, clean the work dir upon the completion of a successfull calculation. + `},{name:"handler_overrides",required:!1,valid_types:"Dict",info:"Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated."},{name:"max_iterations",required:!1,valid_types:"Int",info:` + The maximum number of iterations to perform. + `},{name:"metadata",required:!1,valid_types:"",info:""},{name:"parameters",required:!1,valid_types:"Dict",info:""},{name:"settings",required:!1,valid_types:"Dict",info:""},{name:"site_magnetization",required:!1,valid_types:"Dict",info:"Site magnetization to be used as MAGMOM"},{name:"verbose",required:!1,valid_types:"Bool",info:` + If True, enable more detailed output during workchain execution. 
+ `},{name:"wavecar",required:!1,valid_types:"WavefunData",info:""}],outputs:[{name:"bands",required:!0,valid_types:"BandsData",info:""},{name:"custom_outputs",required:!0,valid_types:"",info:""},{name:"misc",required:!0,valid_types:"Dict",info:"The output parameters containing smaller quantities that do not depend on system size."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"born_charges",required:!1,valid_types:"ArrayData",info:"The output Born effective charges."},{name:"charge_density",required:!1,valid_types:"ArrayData",info:"The output charge density."},{name:"chgcar",required:!1,valid_types:"ChargedensityData",info:"The output charge density CHGCAR file."},{name:"dielectrics",required:!1,valid_types:"ArrayData",info:"The output dielectric functions."},{name:"dos",required:!1,valid_types:"ArrayData",info:"The output dos."},{name:"dynmat",required:!1,valid_types:"ArrayData",info:"The output dynamical matrix."},{name:"energies",required:!1,valid_types:"ArrayData",info:"The output total energies."},{name:"forces",required:!1,valid_types:"ArrayData",info:"The output forces."},{name:"hessian",required:!1,valid_types:"ArrayData",info:"The output Hessian matrix."},{name:"kpoints",required:!1,valid_types:"KpointsData",info:"The output k-points."},{name:"magnetization_density",required:!1,valid_types:"ArrayData",info:"The output magnetization density."},{name:"projectors",required:!1,valid_types:"ArrayData",info:"The output projectors of decomposition."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"site_magnetization",required:!1,valid_types:"Dict",info:"The output of the site magnetization"},{name:"stress",required:!1,valid_types:"ArrayData",info:"The output stress."},{name:"structure",required:!1,valid_types:"StructureData",info:"The output structure."},{name:"trajectory",required:!1,valid_types:"TrajectoryData",info:"The output trajectory data."},{name:"wavecar",required:!1,valid_types:"WavefunData",info:"The output plane wave coefficients file."}],exit_codes:[{status:0,message:"the sun is shining"},{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:420,message:"no called workchain detected"},{status:500,message:"unknown error detected in the bands workchain"},{status:2001,message:"BandsData not found in exposed_ouputs"}]},class:"aiida_vasp.workchains.bands:BandsWorkChain"},"vasp.converge":{description:["A workchain to perform convergence 
tests."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:""},{name:"converge",required:!0,valid_types:"",info:""},{name:"dynamics",required:!0,valid_types:"",info:""},{name:"options",required:!0,valid_types:"Dict",info:""},{name:"parameters",required:!0,valid_types:"Dict",info:""},{name:"potential_family",required:!0,valid_types:"Str",info:""},{name:"potential_mapping",required:!0,valid_types:"Dict",info:""},{name:"structure",required:!0,valid_types:"StructureData, CifData",info:""},{name:"verify",required:!0,valid_types:"",info:""},{name:"chgcar",required:!1,valid_types:"ChargedensityData",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:` + If True, clean the work dir upon the completion of a successfull calculation. + `},{name:"handler_overrides",required:!1,valid_types:"Dict",info:"Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated."},{name:"kpoints",required:!1,valid_types:"KpointsData",info:""},{name:"max_iterations",required:!1,valid_types:"Int",info:` + The maximum number of iterations to perform. + `},{name:"metadata",required:!1,valid_types:"",info:""},{name:"relax",required:!1,valid_types:"",info:""},{name:"restart_folder",required:!1,valid_types:"RemoteData",info:` + The restart folder from a previous workchain run that is going to be used. + `},{name:"settings",required:!1,valid_types:"Dict",info:""},{name:"site_magnetization",required:!1,valid_types:"Dict",info:"Site magnetization to be used as MAGMOM"},{name:"verbose",required:!1,valid_types:"Bool",info:` + If True, enable more detailed output during workchain execution. + `},{name:"wavecar",required:!1,valid_types:"WavefunData",info:""}],outputs:[{name:"converge",required:!0,valid_types:"",info:""},{name:"custom_outputs",required:!0,valid_types:"",info:""},{name:"misc",required:!0,valid_types:"Dict",info:"The output parameters containing smaller quantities that do not depend on system size."},{name:"relax",required:!0,valid_types:"",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"bands",required:!1,valid_types:"BandsData",info:"The output band structure."},{name:"born_charges",required:!1,valid_types:"ArrayData",info:"The output Born effective charges."},{name:"charge_density",required:!1,valid_types:"ArrayData",info:"The output charge density."},{name:"chgcar",required:!1,valid_types:"ChargedensityData",info:"The output charge density CHGCAR file."},{name:"dielectrics",required:!1,valid_types:"ArrayData",info:"The output dielectric functions."},{name:"dos",required:!1,valid_types:"ArrayData",info:"The output dos."},{name:"dynmat",required:!1,valid_types:"ArrayData",info:"The output dynamical matrix."},{name:"energies",required:!1,valid_types:"ArrayData",info:"The output total energies."},{name:"forces",required:!1,valid_types:"ArrayData",info:"The output forces."},{name:"hessian",required:!1,valid_types:"ArrayData",info:"The output Hessian matrix."},{name:"kpoints",required:!1,valid_types:"KpointsData",info:"The output k-points."},{name:"magnetization_density",required:!1,valid_types:"ArrayData",info:"The output magnetization density."},{name:"projectors",required:!1,valid_types:"ArrayData",info:"The output projectors of decomposition."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"site_magnetization",required:!1,valid_types:"Dict",info:"The output of the site magnetization"},{name:"stress",required:!1,valid_types:"ArrayData",info:"The output stress."},{name:"structure",required:!1,valid_types:"StructureData",info:"The output structure."},{name:"trajectory",required:!1,valid_types:"TrajectoryData",info:"The output trajectory data."},{name:"wavecar",required:!1,valid_types:"WavefunData",info:"The output plane wave coefficients file."}],exit_codes:[{status:0,message:"the sun is shining"},{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:420,message:"no called workchain detected"},{status:500,message:"unknown error detected in the converge workchain"}]},class:"aiida_vasp.workchains.converge:ConvergeWorkChain"},"vasp.immigrant":{description:["Import a VASP run executed in the directory specified by folder_path."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:` + If True, clean the work dir upon the completion of a successfull calculation. + `},{name:"folder_path",required:!1,valid_types:"Str",info:"Deprecated."},{name:"handler_overrides",required:!1,valid_types:"Dict",info:"Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated."},{name:"max_iterations",required:!1,valid_types:"Int",info:` + The maximum number of iterations to perform. 
+ `},{name:"metadata",required:!1,valid_types:"",info:""},{name:"options",required:!1,valid_types:"Dict",info:""},{name:"potential_family",required:!1,valid_types:"Str",info:""},{name:"potential_mapping",required:!1,valid_types:"Dict",info:""},{name:"remote_workdir",required:!1,valid_types:"str",info:""},{name:"settings",required:!1,valid_types:"Dict",info:""},{name:"use_chgcar",required:!1,valid_types:"Bool",info:` + If True, WavefunData (of WAVECAR) is attached. + `},{name:"use_wavecar",required:!1,valid_types:"Bool",info:` + If True, WavefunData (of WAVECAR) is attached. + `},{name:"verbose",required:!1,valid_types:"Bool",info:` + If True, enable more detailed output during workchain execution. + `}],outputs:[{name:"custom_outputs",required:!0,valid_types:"",info:""},{name:"misc",required:!0,valid_types:"Dict",info:"The output parameters containing smaller quantities that do not depend on system size."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"bands",required:!1,valid_types:"BandsData",info:"The output band structure."},{name:"born_charges",required:!1,valid_types:"ArrayData",info:"The output Born effective charges."},{name:"charge_density",required:!1,valid_types:"ArrayData",info:"The output charge density."},{name:"chgcar",required:!1,valid_types:"ChargedensityData",info:"The output charge density CHGCAR file."},{name:"dielectrics",required:!1,valid_types:"ArrayData",info:"The output dielectric functions."},{name:"dos",required:!1,valid_types:"ArrayData",info:"The output dos."},{name:"dynmat",required:!1,valid_types:"ArrayData",info:"The output dynamical matrix."},{name:"energies",required:!1,valid_types:"ArrayData",info:"The output total energies."},{name:"forces",required:!1,valid_types:"ArrayData",info:"The output forces."},{name:"hessian",required:!1,valid_types:"ArrayData",info:"The output Hessian matrix."},{name:"kpoints",required:!1,valid_types:"KpointsData",info:"The output k-points."},{name:"magnetization_density",required:!1,valid_types:"ArrayData",info:"The output magnetization density."},{name:"projectors",required:!1,valid_types:"ArrayData",info:"The output projectors of decomposition."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"site_magnetization",required:!1,valid_types:"Dict",info:"The output of the site magnetization"},{name:"stress",required:!1,valid_types:"ArrayData",info:"The output stress."},{name:"structure",required:!1,valid_types:"StructureData",info:"The output structure."},{name:"trajectory",required:!1,valid_types:"TrajectoryData",info:"The output trajectory data."},{name:"wavecar",required:!1,valid_types:"WavefunData",info:"The output plane wave coefficients file."}],exit_codes:[{status:0,message:"the sun is shining"},{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:301,message:"The sub process excepted."},{status:302,message:"The sub 
process was killed."},{status:401,message:"The maximum number of iterations was exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."}]},class:"aiida_vasp.workchains.immigrant:VaspImmigrantWorkChain"},"vasp.master":{description:["The master workchain that selects sub workchains to perform necessary calculations."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:""},{name:"converge",required:!0,valid_types:"",info:""},{name:"dos",required:!0,valid_types:"",info:""},{name:"dynamics",required:!0,valid_types:"",info:""},{name:"options",required:!0,valid_types:"Dict",info:""},{name:"parameters",required:!0,valid_types:"Dict",info:""},{name:"potential_family",required:!0,valid_types:"Str",info:""},{name:"potential_mapping",required:!0,valid_types:"Dict",info:""},{name:"structure",required:!0,valid_types:"StructureData, CifData",info:""},{name:"verify",required:!0,valid_types:"",info:""},{name:"chgcar",required:!1,valid_types:"ChargedensityData",info:""},{name:"extract_bands",required:!1,valid_types:"Bool",info:` + Do you want to extract the band structure? + `},{name:"extract_dos",required:!1,valid_types:"Bool",info:` + Do you want to extract the density of states? + `},{name:"handler_overrides",required:!1,valid_types:"Dict",info:"Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated."},{name:"kpoints",required:!1,valid_types:"KpointsData",info:""},{name:"kpoints_distance",required:!1,valid_types:"Float",info:` + The maximum distance between k-points in inverse AA. + `},{name:"max_iterations",required:!1,valid_types:"Int",info:` + The maximum number of iterations to perform. + `},{name:"metadata",required:!1,valid_types:"",info:""},{name:"relax",required:!1,valid_types:"",info:""},{name:"restart_folder",required:!1,valid_types:"RemoteData",info:` + The restart folder from a previous workchain run that is going to be used. + `},{name:"settings",required:!1,valid_types:"Dict",info:""},{name:"site_magnetization",required:!1,valid_types:"Dict",info:"Site magnetization to be used as MAGMOM"},{name:"verbose",required:!1,valid_types:"Bool",info:` + If True, enable more detailed output during workchain execution. 
+ `},{name:"wavecar",required:!1,valid_types:"WavefunData",info:""}],outputs:[{name:"bands",required:!1,valid_types:"",info:""},{name:"dos",required:!1,valid_types:"",info:""}],exit_codes:[{status:0,message:"the sun is shining"},{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:420,message:"no called workchain detected"},{status:500,message:"unknown error detected in the master workchain"}]},class:"aiida_vasp.workchains.master:MasterWorkChain"},"vasp.neb":{description:["The NEB workchain.",""," -------------------"," Error handling enriched wrapper around VaspNEBCalculation.",""," Deliberately conserves most of the interface (required inputs) of the VaspNEBCalculation class, but"," makes it possible for a user to interact with a workchain and not a calculation.",""," In addition, implement restarts of calculation when the calculation is net full converged for error handling."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"dynamics",required:!0,valid_types:"",info:""},{name:"final_structure",required:!0,valid_types:"StructureData, CifData",info:"The input structure (POSCAR) for the final image."},{name:"initial_structure",required:!0,valid_types:"StructureData, CifData",info:"The input structure (POSCAR) for initial image."},{name:"neb_images",required:!0,valid_types:"StructureData, CifData",info:"Starting structure for the NEB images"},{name:"options",required:!0,valid_types:"Dict",info:""},{name:"parameters",required:!0,valid_types:"Dict",info:"The VASP input parameters (INCAR)."},{name:"potential_family",required:!0,valid_types:"Str",info:""},{name:"potential_mapping",required:!0,valid_types:"Dict",info:""},{name:"charge_density",required:!1,valid_types:"ChargedensityData",info:"The charge density. (CHGCAR)"},{name:"clean_workdir",required:!1,valid_types:"Bool",info:` + If True, clean the work dir upon the completion of a successfull calculation. + `},{name:"handler_overrides",required:!1,valid_types:"Dict",info:"Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated."},{name:"kpoints",required:!1,valid_types:"KpointsData",info:""},{name:"kpoints_spacing",required:!1,valid_types:"Float",info:"Spacing for the kpoints in units A^-1 * 2pi (CASTEP style `kpoints_mp_spacing`)"},{name:"kpoints_spacing_vasp",required:!1,valid_types:"Float",info:"Spacing for the kpoints in units A^-1 (VASP style)"},{name:"ldau_mapping",required:!1,valid_types:"Dict",info:"Mappings, see the doc string of 'get_ldau_keys'"},{name:"max_iterations",required:!1,valid_types:"Int",info:` + The maximum number of iterations to perform. + `},{name:"metadata",required:!1,valid_types:"",info:""},{name:"restart_folder",required:!1,valid_types:"RemoteData",info:"A remote folder to restart from if need be"},{name:"settings",required:!1,valid_types:"Dict",info:"Additional parameters not related to VASP itself."},{name:"verbose",required:!1,valid_types:"Bool",info:` + If True, enable more detailed output during workchain execution. 
+ `},{name:"wavefunctions",required:!1,valid_types:"WavefunData",info:"The wave function coefficients. (WAVECAR)"}],outputs:[{name:"custom_outputs",required:!0,valid_types:"",info:""},{name:"misc",required:!0,valid_types:"Dict",info:"Per-image misc output."},{name:"neb_misc",required:!0,valid_types:"Dict",info:"NEB related data combined for each image"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"structure",required:!0,valid_types:"StructureData",info:"NEB images"},{name:"bands",required:!1,valid_types:"BandsData",info:"The output band structure."},{name:"born_charges",required:!1,valid_types:"ArrayData",info:"The output Born effective charges."},{name:"charge_density",required:!1,valid_types:"ArrayData",info:"The output charge density."},{name:"chgcar",required:!1,valid_types:"ChargedensityData",info:"The output charge density."},{name:"dielectrics",required:!1,valid_types:"ArrayData",info:"The output dielectric functions."},{name:"dos",required:!1,valid_types:"ArrayData",info:"The output dos."},{name:"dynmat",required:!1,valid_types:"ArrayData",info:"The output dynamical matrix."},{name:"energies",required:!1,valid_types:"ArrayData",info:"The output total energies."},{name:"forces",required:!1,valid_types:"ArrayData",info:"The output forces."},{name:"hessian",required:!1,valid_types:"ArrayData",info:"The output Hessian matrix."},{name:"kpoints",required:!1,valid_types:"KpointsData",info:"Kpoints for each image."},{name:"magnetization_density",required:!1,valid_types:"ArrayData",info:"The output magnetization density."},{name:"projectors",required:!1,valid_types:"ArrayData",info:"The output projectors of decomposition."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"site_magnetization",required:!1,valid_types:"Dict",info:"The output of the site magnetization for each image."},{name:"stress",required:!1,valid_types:"ArrayData",info:"The output stress."},{name:"trajectory",required:!1,valid_types:"TrajectoryData",info:"The output trajectory data."},{name:"wavecar",required:!1,valid_types:"WavefunData",info:"The output file containing the plane wave coefficients."}],exit_codes:[{status:0,message:"the sun is shining"},{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:401,message:"The maximum number of iterations was exceeded."},{status:402,message:"The process failed for an unknown reason, twice in a row."},{status:501,message:"Unrecoverable error in launched NEB calculations."},{status:700,message:"the user did not supply a potential family name"},{status:701,message:"ValueError was returned from get_potcars_from_structure"},{status:702,message:"the potential does not exist"},{status:703,message:"the exception: {exception} was thrown while massaging the 
parameters"}]},class:"aiida_vasp.workchains.neb:VaspNEBWorkChain"},"vasp.relax":{description:["Structure relaxation workchain."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:""},{name:"dynamics",required:!0,valid_types:"",info:""},{name:"kpoints",required:!0,valid_types:"KpointsData",info:""},{name:"options",required:!0,valid_types:"Dict",info:""},{name:"parameters",required:!0,valid_types:"Dict",info:""},{name:"potential_family",required:!0,valid_types:"Str",info:""},{name:"potential_mapping",required:!0,valid_types:"Dict",info:""},{name:"relax",required:!0,valid_types:"",info:""},{name:"structure",required:!0,valid_types:"StructureData, CifData",info:""},{name:"verify",required:!0,valid_types:"",info:""},{name:"chgcar",required:!1,valid_types:"ChargedensityData",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:` + If True, clean the work dir upon the completion of a successfull calculation. + `},{name:"handler_overrides",required:!1,valid_types:"Dict",info:"Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated."},{name:"max_iterations",required:!1,valid_types:"Int",info:` + The maximum number of iterations to perform. + `},{name:"metadata",required:!1,valid_types:"",info:""},{name:"restart_folder",required:!1,valid_types:"RemoteData",info:` + The restart folder from a previous workchain run that is going to be used. + `},{name:"settings",required:!1,valid_types:"Dict",info:""},{name:"site_magnetization",required:!1,valid_types:"Dict",info:"Site magnetization to be used as MAGMOM"},{name:"verbose",required:!1,valid_types:"Bool",info:` + If True, enable more detailed output during workchain execution. + `},{name:"wavecar",required:!1,valid_types:"WavefunData",info:""}],outputs:[{name:"custom_outputs",required:!0,valid_types:"",info:""},{name:"misc",required:!0,valid_types:"Dict",info:"The output parameters containing smaller quantities that do not depend on system size."},{name:"relax",required:!0,valid_types:"",info:""},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"bands",required:!1,valid_types:"BandsData",info:"The output band structure."},{name:"born_charges",required:!1,valid_types:"ArrayData",info:"The output Born effective charges."},{name:"charge_density",required:!1,valid_types:"ArrayData",info:"The output charge density."},{name:"chgcar",required:!1,valid_types:"ChargedensityData",info:"The output charge density CHGCAR file."},{name:"dielectrics",required:!1,valid_types:"ArrayData",info:"The output dielectric functions."},{name:"dos",required:!1,valid_types:"ArrayData",info:"The output dos."},{name:"dynmat",required:!1,valid_types:"ArrayData",info:"The output dynamical matrix."},{name:"energies",required:!1,valid_types:"ArrayData",info:"The output total energies."},{name:"forces",required:!1,valid_types:"ArrayData",info:"The output forces."},{name:"hessian",required:!1,valid_types:"ArrayData",info:"The output Hessian matrix."},{name:"kpoints",required:!1,valid_types:"KpointsData",info:"The output k-points."},{name:"magnetization_density",required:!1,valid_types:"ArrayData",info:"The output magnetization density."},{name:"projectors",required:!1,valid_types:"ArrayData",info:"The output projectors of decomposition."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"site_magnetization",required:!1,valid_types:"Dict",info:"The output of the site magnetization"},{name:"stress",required:!1,valid_types:"ArrayData",info:"The output stress."},{name:"structure",required:!1,valid_types:"StructureData",info:"The output structure."},{name:"trajectory",required:!1,valid_types:"TrajectoryData",info:"The output trajectory data."},{name:"wavecar",required:!1,valid_types:"WavefunData",info:"The output plane wave coefficients file."}],exit_codes:[{status:0,message:"the sun is shining"},{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:300,message:"the called workchain does not contain the necessary relaxed output structure"},{status:420,message:"no called workchain detected"},{status:500,message:"unknown error detected in the relax workchain"},{status:502,message:"there was an error overriding the parameters"}]},class:"aiida_vasp.workchains.relax:RelaxWorkChain"},"vasp.vasp":{description:["The VASP workchain.",""," -------------------"," Error handling enriched wrapper around VaspCalculation.",""," Deliberately conserves most of the interface (required inputs) of the VaspCalculation class, but"," makes it possible for a user to interact with a workchain and not a calculation.",""," This is intended to be used instead of directly submitting a VaspCalculation,"," so that future features like"," automatic restarting, error checking etc. can be propagated to higher level workchains"," automatically by implementing them here.",""," Handlers are implemented to try fix common problems and improves the robustness."," Individual handlers can be enabled/disabled by setting the ``handler_overrides`` input port.",' Additional settings may be passed under the "settings" input, which is also forwarded to the'," calculations. 
The avaliable options are:",""," - ``USE_WAVECAR_FOR_RESTART`` wether calculation restarts should use the WAVECAR. The default is ``True``.",""," Usage::",""," from aiida.common.extendeddicts import AttributeDict"," from aiida.work import submit"," basevasp = WorkflowFactory('vasp.vasp')"," inputs = basevasp.get_builder()"," inputs = AttributeDict()"," ## ... set inputs"," submit(basevasp, **inputs)",""," To see a working example, including generation of input nodes from scratch, please"," refer to ``examples/run_vasp_lean.py``."],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:""},{name:"dynamics",required:!0,valid_types:"",info:""},{name:"kpoints",required:!0,valid_types:"KpointsData",info:""},{name:"options",required:!0,valid_types:"Dict",info:""},{name:"parameters",required:!0,valid_types:"Dict",info:""},{name:"potential_family",required:!0,valid_types:"Str",info:""},{name:"potential_mapping",required:!0,valid_types:"Dict",info:""},{name:"structure",required:!0,valid_types:"StructureData, CifData",info:""},{name:"chgcar",required:!1,valid_types:"ChargedensityData",info:""},{name:"clean_workdir",required:!1,valid_types:"Bool",info:` + If True, clean the work dir upon the completion of a successfull calculation. + `},{name:"handler_overrides",required:!1,valid_types:"Dict",info:"Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated."},{name:"max_iterations",required:!1,valid_types:"Int",info:` + The maximum number of iterations to perform. + `},{name:"metadata",required:!1,valid_types:"",info:""},{name:"restart_folder",required:!1,valid_types:"RemoteData",info:` + The restart folder from a previous workchain run that is going to be used. + `},{name:"settings",required:!1,valid_types:"Dict",info:""},{name:"site_magnetization",required:!1,valid_types:"Dict",info:"Site magnetization to be used as MAGMOM"},{name:"verbose",required:!1,valid_types:"Bool",info:` + If True, enable more detailed output during workchain execution. + `},{name:"wavecar",required:!1,valid_types:"WavefunData",info:""}],outputs:[{name:"custom_outputs",required:!0,valid_types:"",info:""},{name:"misc",required:!0,valid_types:"Dict",info:"The output parameters containing smaller quantities that do not depend on system size."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"bands",required:!1,valid_types:"BandsData",info:"The output band structure."},{name:"born_charges",required:!1,valid_types:"ArrayData",info:"The output Born effective charges."},{name:"charge_density",required:!1,valid_types:"ArrayData",info:"The output charge density."},{name:"chgcar",required:!1,valid_types:"ChargedensityData",info:"The output charge density CHGCAR file."},{name:"dielectrics",required:!1,valid_types:"ArrayData",info:"The output dielectric functions."},{name:"dos",required:!1,valid_types:"ArrayData",info:"The output dos."},{name:"dynmat",required:!1,valid_types:"ArrayData",info:"The output dynamical matrix."},{name:"energies",required:!1,valid_types:"ArrayData",info:"The output total energies."},{name:"forces",required:!1,valid_types:"ArrayData",info:"The output forces."},{name:"hessian",required:!1,valid_types:"ArrayData",info:"The output Hessian matrix."},{name:"kpoints",required:!1,valid_types:"KpointsData",info:"The output k-points."},{name:"magnetization_density",required:!1,valid_types:"ArrayData",info:"The output magnetization density."},{name:"projectors",required:!1,valid_types:"ArrayData",info:"The output projectors of decomposition."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"site_magnetization",required:!1,valid_types:"Dict",info:"The output of the site magnetization"},{name:"stress",required:!1,valid_types:"ArrayData",info:"The output stress."},{name:"structure",required:!1,valid_types:"StructureData",info:"The output structure."},{name:"trajectory",required:!1,valid_types:"TrajectoryData",info:"The output trajectory data."},{name:"wavecar",required:!1,valid_types:"WavefunData",info:"The output plane wave coefficients file."}],exit_codes:[{status:0,message:"the sun is shining"},{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:300,message:"the calculation is missing at least one required output in the restart workchain"},{status:301,message:"The sub process excepted."},{status:302,message:"The sub process was killed."},{status:400,message:"the run_calculation step did not successfully add a calculation node to the context"},{status:401,message:"the maximum number of iterations was exceeded"},{status:402,message:"the calculation finished with an unexpected calculation state"},{status:403,message:"the calculation experienced and unexpected failure"},{status:404,message:"the calculation failed to submit, twice in a row"},{status:405,message:"the calculation failed for an unknown reason, twice in a row"},{status:500,message:"Missing critical output for inspecting the status of the calculation."},{status:501,message:"Cannot handle the error - inputs are likely need to be revised manually. 
Message: {message}"},{status:502,message:"Cannot handle the error - the last calculation did not reach the end of execution."},{status:503,message:"Cannot handle the error - the last calculation did not reach electronic convergence."},{status:504,message:"The ionic relaxation is not converged."},{status:505,message:"At least one of the ionic steps during the relaxation has did not have converged electronic structure."},{status:700,message:"the user did not supply a potential family name"},{status:701,message:"ValueError was returned from get_potcars_from_structure"},{status:702,message:"the potential does not exist"},{status:703,message:"the exception: {exception} was thrown while massaging the parameters"}]},class:"aiida_vasp.workchains.vasp:VaspWorkChain"}},console_scripts:{"mock-vasp":"aiida_vasp.commands.mock_vasp:mock_vasp","mock-vasp-strict":"aiida_vasp.commands.mock_vasp:mock_vasp_strict"}},commits_count:110,development_status:"stable",summaryinfo:[{colorclass:"blue",text:"Calculations",count:4},{colorclass:"brown",text:"Parsers",count:3},{colorclass:"red",text:"Data",count:5},{colorclass:"green",text:"Workflows",count:7},{colorclass:"purple",text:"Console scripts",count:2},{colorclass:"orange",text:"Other (Data commands, Groups)",count:2}],pip_install_cmd:"pip install aiida-vasp",is_installable:"True"},"aiida-wannier90":{code_home:"https://github.com/aiidateam/aiida-wannier90",documentation_url:"https://aiida-wannier90.readthedocs.io/",entry_point_prefix:"wannier90",pip_url:"aiida-wannier90",plugin_info:"https://raw.github.com/aiidateam/aiida-wannier90/master/setup.json",name:"aiida-wannier90",package_name:"aiida_wannier90",hosted_on:"github.com",metadata:{description:"AiiDA Plugin for the Wannier90 code",author:"Junfeng Qiao, Dominik Gresch, Antimo Marrazzo, Daniel Marchand, Giovanni Pizzi, Norma Rivano, The AiiDA team",classifiers:["Development Status :: 5 - Production/Stable","Environment :: Plugins","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Operating System :: MacOS :: MacOS X","Operating System :: POSIX :: Linux","Programming Language :: Python","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering :: Physics"],version:"2.1.0"},aiida_version:">=2.0,<3",entry_points:{"aiida.calculations":{"wannier90.postw90":{description:["Plugin for Wannier90.",""," Wannier90 is a code for computing maximally-localized Wannier functions."," See http://www.wannier.org/ for more details."],spec:{inputs:[{name:"parameters",required:!0,valid_types:"Dict",info:"Input parameters for the Wannier90 code"},{name:"parent_folder",required:!0,valid_types:"RemoteData",info:"Get input files (``.amn``, ``.mmn``, ...) from a class ``RemoteData`` possibly stored in a remote computer."},{name:"structure",required:!0,valid_types:"StructureData",info:"input crystal structure"},{name:"bands_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"A list of k-points along a path to be used for bands interpolation; it should contain `labels`. Specify either this or `kpoint_path`."},{name:"clean_workdir",required:!1,valid_types:"Bool",info:"If `True`, work directories of all called calculation jobs will be cleaned at the end of execution."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. 
This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"kpoint_path",required:!1,valid_types:"Dict, NoneType",info:"Description of the k-points path to be used for bands interpolation; it should contain two properties: a list ``path`` of length-2 tuples with the labels of the endpoints of the path; and a dictionary ``point_coords`` giving the scaled coordinates for each high-symmetry endpoint."},{name:"kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"k-point mesh used in the NSCF calculation."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"projections",required:!1,valid_types:"OrbitalData, Dict, List, NoneType",info:"Starting projections for the Wannierisation procedure."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Additional settings to manage the Wannier90 calculation."}],outputs:[{name:"boltzwann",required:!0,valid_types:"",info:""},{name:"output_parameters",required:!0,valid_types:"Dict",info:"The ``output_parameters`` output node of the successful calculation."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"interpolated_bands",required:!1,valid_types:"BandsData",info:"The interpolated band structure by Wannier90 (if any)."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:200,message:"The retrieved folder data node could not be accessed."},{status:210,message:"The retrieved folder did not contain the required stdout output file."},{status:300,message:"A Wannier90 error file (.werr) has been found."},{status:400,message:'The string "Exiting..." 
has been found in the Wannier90 output (some partial output might have been parsed).'},{status:401,message:"An error related to bvectors has been found in the Wannier90 output."},{status:402,message:"Energy window contains fewer states than number of target WFs."},{status:403,message:"Error plotting Wanier functions in cube format."},{status:404,message:"The stdout output file was incomplete probably because the calculation got interrupted."},{status:405,message:"Some output files were missing probably because the calculation got interrupted."},{status:406,message:"The retrieved temporary folder could not be accessed."}]},class:"aiida_wannier90.calculations:Postw90Calculation"},"wannier90.wannier90":{description:["Plugin for Wannier90.",""," Wannier90 is a code for computing maximally-localized Wannier functions."," See http://www.wannier.org/ for more details."],spec:{inputs:[{name:"kpoints",required:!0,valid_types:"KpointsData",info:"k-point mesh used in the NSCF calculation."},{name:"parameters",required:!0,valid_types:"Dict",info:"Input parameters for the Wannier90 code"},{name:"structure",required:!0,valid_types:"StructureData",info:"input crystal structure"},{name:"bands_kpoints",required:!1,valid_types:"KpointsData, NoneType",info:"A list of k-points along a path to be used for bands interpolation; it should contain `labels`. Specify either this or `kpoint_path`."},{name:"code",required:!1,valid_types:"AbstractCode, NoneType",info:"The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run."},{name:"kpoint_path",required:!1,valid_types:"Dict, NoneType",info:"Description of the k-points path to be used for bands interpolation; it should contain two properties: a list ``path`` of length-2 tuples with the labels of the endpoints of the path; and a dictionary ``point_coords`` giving the scaled coordinates for each high-symmetry endpoint."},{name:"local_input_folder",required:!1,valid_types:"FolderData, NoneType",info:"Get input files (``.amn``, ``.mmn``, ...) from a class ``FolderData`` stored in the AiiDA repository."},{name:"metadata",required:!1,valid_types:"",info:""},{name:"monitors",required:!1,valid_types:"Dict",info:"Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job."},{name:"projections",required:!1,valid_types:"OrbitalData, Dict, List, NoneType",info:"Starting projections for the Wannierisation procedure."},{name:"remote_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual."},{name:"remote_input_folder",required:!1,valid_types:"RemoteData, NoneType",info:"Get input files (``.amn``, ``.mmn``, ...) 
from a class ``RemoteData`` possibly stored in a remote computer."},{name:"settings",required:!1,valid_types:"Dict, NoneType",info:"Additional settings to manage the Wannier90 calculation."}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"The ``output_parameters`` output node of the successful calculation."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"interpolated_bands",required:!1,valid_types:"BandsData",info:"The interpolated band structure by Wannier90 (if any)."},{name:"nnkp_file",required:!1,valid_types:"SinglefileData",info:"The ``.nnkp`` file, produced only in -pp (postproc) mode."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:131,message:"The specified account is invalid."},{status:140,message:"The node running the job failed."},{status:150,message:"{message}"},{status:200,message:"The retrieved folder data node could not be accessed."},{status:210,message:"The retrieved folder did not contain the required stdout output file."},{status:300,message:"A Wannier90 error file (.werr) has been found."},{status:400,message:'The string "Exiting..." 
has been found in the Wannier90 output (some partial output might have been parsed).'},{status:401,message:"An error related to bvectors has been found in the Wannier90 output."},{status:402,message:"Energy window contains fewer states than number of target WFs."},{status:403,message:"Error plotting Wanier functions in cube format."},{status:404,message:"The stdout output file was incomplete probably because the calculation got interrupted."}]},class:"aiida_wannier90.calculations:Wannier90Calculation"}},"aiida.parsers":{"wannier90.postw90":"aiida_wannier90.parsers:Postw90Parser","wannier90.wannier90":"aiida_wannier90.parsers:Wannier90Parser"},"aiida.workflows":{"wannier90.minimal":{description:["Workchain to run a full stack of Quantum ESPRESSO + Wannier90 for GaAs.",""," Note that this is mostly to be used as an example, as there is no"," error checking and runs directly Quantum ESPRESSO calculations rather"," than the base workflows."],spec:{inputs:[{name:"kpoint_path",required:!0,valid_types:"Dict",info:"The kpoints path for the NSCF run and Wannierisation."},{name:"kpoints_nscf",required:!0,valid_types:"KpointsData",info:"The kpoints for the NSCF run and Wannierisation."},{name:"kpoints_scf",required:!0,valid_types:"KpointsData",info:"The kpoints for the SCF run."},{name:"projections",required:!0,valid_types:"OrbitalData",info:"The projections for the Wannierisation."},{name:"pseudo_family",required:!0,valid_types:"Str",info:"The name of a pseudopotential family to use."},{name:"pw2wannier90_code",required:!0,valid_types:"Code",info:"The `pw2wannier90.x` code to use for the `Pw2Wannier90Calculation`s."},{name:"pw_code",required:!0,valid_types:"Code",info:"The `pw.x` code to use for the `PwCalculation`s."},{name:"structure",required:!0,valid_types:"StructureData",info:"The input structure."},{name:"wannier_code",required:!0,valid_types:"Code",info:"The `wannier90.x` code to use for the `Wannier90Calculation`s."},{name:"max_wallclock_seconds",required:!1,valid_types:"Int, NoneType",info:"Maximum wallclock time in seconds"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"num_machines",required:!1,valid_types:"Int, NoneType",info:"The number of machines (nodes) to use"}],outputs:[{name:"matrices_folder",required:!0,valid_types:"FolderData",info:""},{name:"nnkp_file",required:!0,valid_types:"SinglefileData",info:""},{name:"nscf_output",required:!0,valid_types:"Dict",info:""},{name:"p2wannier_output",required:!0,valid_types:"Dict",info:""},{name:"pw2wan_remote_folder",required:!0,valid_types:"RemoteData",info:""},{name:"scf_output",required:!0,valid_types:"Dict",info:""},{name:"wannier_bands",required:!0,valid_types:"BandsData",info:""}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."}]},class:"aiida_wannier90.workflows.minimal:MinimalW90WorkChain"}}},commits_count:20,development_status:"stable",summaryinfo:[{colorclass:"blue",text:"Calculations",count:2},{colorclass:"brown",text:"Parsers",count:2},{colorclass:"green",text:"Workflows",count:1}],pip_install_cmd:"pip install 
aiida-wannier90",is_installable:"True"},"aiida-wannier90-workflows":{code_home:"https://github.com/aiidateam/aiida-wannier90-workflows",development_status:"stable",entry_point_prefix:"wannier90_workflows",pip_url:"aiida-wannier90-workflows",plugin_info:"https://raw.github.com/aiidateam/aiida-wannier90-workflows/master/setup.json",name:"aiida-wannier90-workflows",package_name:"aiida_wannier90_workflows",hosted_on:"github.com",metadata:{description:"Advanced AiiDA workflows for Wannier90",author:"Junfeng Qiao, Antimo Marrazzo, Giovanni Pizzi",classifiers:["Development Status :: 5 - Production/Stable","Environment :: Plugins","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Operating System :: MacOS :: MacOS X","Operating System :: POSIX :: Linux","Programming Language :: Python","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering :: Physics"],version:"2.1.0"},aiida_version:">=2.0,<3",entry_points:{"aiida.calculations":{"wannier90_workflows.split":"aiida_wannier90_workflows.calculations.split:Wannier90SplitCalculation"},"aiida.parsers":{"wannier90_workflows.split":"aiida_wannier90_workflows.parsers.split:Wannier90SplitParser"},"aiida.workflows":{"wannier90_workflows.bands":"aiida_wannier90_workflows.workflows.bands:Wannier90BandsWorkChain","wannier90_workflows.base.open_grid":"aiida_wannier90_workflows.workflows.base.open_grid:OpenGridBaseWorkChain","wannier90_workflows.base.projwfc":"aiida_wannier90_workflows.workflows.base.projwfc:ProjwfcBaseWorkChain","wannier90_workflows.base.pw2wannier90":"aiida_wannier90_workflows.workflows.base.pw2wannier90:Pw2wannier90BaseWorkChain","wannier90_workflows.base.wannier90":"aiida_wannier90_workflows.workflows.base.wannier90:Wannier90BaseWorkChain","wannier90_workflows.open_grid":"aiida_wannier90_workflows.workflows.open_grid:Wannier90OpenGridWorkChain","wannier90_workflows.optimize":"aiida_wannier90_workflows.workflows.optimize:Wannier90OptimizeWorkChain","wannier90_workflows.projwfcbands":"aiida_wannier90_workflows.workflows.projwfcbands:ProjwfcBandsWorkChain","wannier90_workflows.split":"aiida_wannier90_workflows.workflows.split:Wannier90SplitWorkChain","wannier90_workflows.wannier90":"aiida_wannier90_workflows.workflows.wannier90:Wannier90WorkChain"},console_scripts:{"aiida-wannier90-workflows":"aiida_wannier90_workflows.cli:cmd_root"}},commits_count:42,summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"green",text:"Workflows",count:10},{colorclass:"purple",text:"Console scripts",count:1}],pip_install_cmd:"pip install aiida-wannier90-workflows",is_installable:"True"},"aiida-wien2k":{code_home:"https://github.com/rubel75/aiida-wien2k",entry_point_prefix:"wien2k",name:"aiida-wien2k",package_name:"aiida_wien2k",hosted_on:"github.com",metadata:{},aiida_version:null,entry_points:{},commits_count:1,development_status:"planning",summaryinfo:[],pip_install_cmd:"See source code repository."},"aiida-yambo":{code_home:"https://github.com/yambo-code/yambo-aiida/",development_status:"stable",entry_point_prefix:"yambo",pip_url:"aiida-yambo",plugin_info:"https://raw.github.com/yambo-code/yambo-aiida/master/setup.json",name:"aiida-yambo",package_name:"aiida_yambo",hosted_on:"github.com",metadata:{description:"YAMBO plugin and workflows for AiiDA",author:"Miki Bonacci, Michael 
Atambo, Antimo Marrazzo, Prandini Gianluca",author_email:"miki.bonacci@unimore.it",license:"MIT",home_page:"https://github.com/yambo-code/yambo-aiida",classifiers:["Environment :: Plugins","Framework :: AiiDA","License :: OSI Approved :: MIT License","Programming Language :: Python","Topic :: Scientific/Engineering :: Physics"],version:"1.3.0"},aiida_version:">=1.0.0a2",entry_points:{"aiida.calculations":{"yambo.yambo":{description:["AiiDA plugin for the Yambo code."," For more information, refer to http://www.yambo-code.org/"," https://github.com/yambo-code/yambo-aiida and http://aiida-yambo.readthedocs.io/en/latest/"],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"Use a main code for yambo calculation"},{name:"parameters",required:!0,valid_types:"Dict",info:"Use a node that specifies the input parameters"},{name:"parent_folder",required:!0,valid_types:"RemoteData",info:'Use a remote folder as parent folder (for "restarts and similar"'},{name:"settings",required:!0,valid_types:"Dict",info:"Use an additional node for special settings"},{name:"metadata",required:!1,valid_types:"",info:""},{name:"precode_parameters",required:!1,valid_types:"Dict",info:"Use a node that specifies the input parameters for the yambo precode"},{name:"preprocessing_code",required:!1,valid_types:"Code",info:"Use a preprocessing code for starting yambo"}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"returns the output parameters"},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"array_alpha",required:!1,valid_types:"ArrayData",info:"returns the alpha array"},{name:"array_alpha_array",required:!1,valid_types:"ArrayData",info:"returns the alpha array"},{name:"array_alpha_bands",required:!1,valid_types:"ArrayData",info:"returns the alpha array bands"},{name:"array_eels",required:!1,valid_types:"ArrayData",info:"returns the eels array"},{name:"array_eps",required:!1,valid_types:"ArrayData",info:"returns the eps array"},{name:"array_ndb",required:!1,valid_types:"ArrayData",info:"returns the array for ndb"},{name:"array_ndb_HFlocXC",required:!1,valid_types:"ArrayData",info:"returns the array ndb for HFlocXC"},{name:"array_ndb_QP",required:!1,valid_types:"ArrayData",info:"returns the array for ndbQP"},{name:"array_qp",required:!1,valid_types:"ArrayData",info:"returns the quasiparticle array band structure"},{name:"bands_quasiparticle",required:!1,valid_types:"BandsData",info:"returns the quasiparticle band structure"},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."},{name:"system_info",required:!1,valid_types:"Dict",info:"returns some system information after a p2y"}],exit_codes:[{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."},{status:500,message:"The retrieved folder data node could 
not be accessed."},{status:501,message:"time exceeded the max walltime"},{status:502,message:"failed calculation for some reason: could be a low number of conduction bands"},{status:503,message:"Unexpected behavior of YamboFolder"},{status:504,message:"parallelization error"},{status:505,message:"general memory error"},{status:506,message:"x_par allocation memory error"}]},class:"aiida_yambo.calculations.yambo:YamboCalculation"}},"aiida.data":{},"aiida.parsers":{"yambo.yambo":"aiida_yambo.parsers.parsers:YamboParser"}},commits_count:59,summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1}],pip_install_cmd:"pip install aiida-yambo",is_installable:"True"},"aiida-yambo-wannier90":{code_home:"https://github.com/aiidaplugins/aiida-yambo-wannier90",entry_point_prefix:"yambo_wannier90",pip_url:"aiida-yambo-wannier90",plugin_info:"https://raw.githubusercontent.com/aiidaplugins/aiida-yambo-wannier90/main/pyproject.toml",documentation_url:"https://aiida-yambo-wannier90.readthedocs.io/en/latest/",version_file:"https://raw.githubusercontent.com/aiidaplugins/aiida-yambo-wannier90/main/aiida_yambo_wannier90/__init__.py",name:"aiida-yambo-wannier90",package_name:"aiida_yambo_wannier90",hosted_on:"github.com",metadata:{description:"Plugin to combine Wannier90 interpolations with GW corrections computed by Yambo",author:"The AiiDA Team",classifiers:["Development Status :: 4 - Beta","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Natural Language :: English","Programming Language :: Python"],version:"0.1.0b0"},aiida_version:">=1.6.4,<3",entry_points:{"aiida.calculations":{"yambo_wannier90.gw2wannier90":"aiida_yambo_wannier90.calculations.gw2wannier90:Gw2wannier90Calculation"},"aiida.parsers":{"yambo_wannier90.gw2wannier90":"aiida_yambo_wannier90.parsers.gw2wannier90:Gw2wannier90Parser"},"aiida.workflows":{yambo_wannier90:"aiida_yambo_wannier90.workflows:YamboWannier90WorkChain"},console_scripts:{"aiida-yambo-wannier90":"aiida_yambo_wannier90.cli:cmd_root"}},commits_count:0,development_status:"beta",summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"green",text:"Workflows",count:1},{colorclass:"purple",text:"Console scripts",count:1}],pip_install_cmd:"pip install --pre aiida-yambo-wannier90",is_installable:"True"},"aiida-yascheduler":{code_home:"https://github.com/tilde-lab/yascheduler",documentation_url:"https://github.com/tilde-lab/yascheduler",entry_point_prefix:"yascheduler",pip_url:"yascheduler",plugin_info:"https://raw.githubusercontent.com/tilde-lab/yascheduler/master/setup.json",name:"aiida-yascheduler",package_name:"aiida_yascheduler",hosted_on:"github.com",metadata:{description:"Yet another computing scheduler and cloud orchestration engine",author:"Andrey Sobolev",author_email:"Evgeny Blokhin , Sergei Korolev ",classifiers:["Development Status :: 4 - Beta","Framework :: AiiDA","Intended Audience :: Science/Research","License :: OSI Approved :: MIT License","Programming Language :: Python :: 3","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Topic :: Scientific/Engineering :: Chemistry","Topic :: Scientific/Engineering :: Information Analysis","Topic :: Scientific/Engineering :: Physics","Topic :: Software Development :: Libraries :: Python 
Modules"],version:"1.2.0"},aiida_version:null,entry_points:{"aiida.schedulers":{yascheduler:"yascheduler.aiida_plugin:YaScheduler"},console_scripts:{yainit:"yascheduler.utils:init",yanodes:"yascheduler.utils:show_nodes",yascheduler:"yascheduler.utils:daemonize",yasetnode:"yascheduler.utils:manage_node",yastatus:"yascheduler.utils:check_status",yasubmit:"yascheduler.utils:submit"}},commits_count:76,development_status:"beta",summaryinfo:[{colorclass:"purple",text:"Console scripts",count:6},{colorclass:"orange",text:"Other (Schedulers)",count:1}],pip_install_cmd:"pip install yascheduler",is_installable:"True"},"aiida-z2pack":{code_home:"https://github.com/AntimoMarrazzo/aiida-z2pack",entry_point_prefix:"z2pack",pip_url:"git+https://github.com/AntimoMarrazzo/aiida-z2pack",name:"aiida-z2pack",package_name:"aiida_z2pack",hosted_on:"github.com",metadata:{},aiida_version:null,entry_points:{},commits_count:18,development_status:"planning",summaryinfo:[],pip_install_cmd:"pip install git+https://github.com/AntimoMarrazzo/aiida-z2pack"},"aiida-zeopp":{code_home:"https://github.com/ltalirz/aiida-zeopp",development_status:"stable",entry_point_prefix:"zeopp",pip_url:"aiida-zeopp",name:"aiida-zeopp",package_name:"aiida_zeopp",hosted_on:"github.com",metadata:{description:"AiiDA plugin for zeo++",author:"Leopold Talirz",author_email:"leopold.talirz@epfl.ch",license:"Creative Commons",home_page:"https://github.com/ltalirz/aiida-zeopp",classifiers:["Framework :: AiiDA","Programming Language :: Python"],version:"1.1.2"},aiida_version:null,entry_points:{"aiida.calculations":{"zeopp.network":{description:["AiiDA calculation plugin for the zeo++ network binary"],spec:{inputs:[{name:"code",required:!0,valid_types:"Code",info:"The `Code` to use for this job."},{name:"parameters",required:!0,valid_types:"NetworkParameters",info:"command line parameters for zeo++"},{name:"structure",required:!0,valid_types:"CifData",info:"input structure to be analyzed"},{name:"atomic_radii",required:!1,valid_types:"SinglefileData",info:"atomic radii file"},{name:"metadata",required:!1,valid_types:"",info:""}],outputs:[{name:"output_parameters",required:!0,valid_types:"Dict",info:"key-value pairs parsed from zeo++ output file(s)."},{name:"remote_folder",required:!0,valid_types:"RemoteData",info:"Input files necessary to run the process will be stored in this folder node."},{name:"retrieved",required:!0,valid_types:"FolderData",info:"Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`."},{name:"block",required:!1,valid_types:"SinglefileData",info:"Blocked pockets fileoutput file."},{name:"remote_stash",required:!1,valid_types:"RemoteStashData",info:"Contents of the `stash.source_list` option are stored in this remote folder after job completion."}],exit_codes:[{status:0,message:"Calculation completed successfully."},{status:1,message:"The process has failed with an unspecified error."},{status:2,message:"The process failed with legacy failure mode."},{status:10,message:"The process returned an invalid output."},{status:11,message:"The process did not register a required output."},{status:100,message:"The process did not have the required `retrieved` output."},{status:101,message:"Not all expected output files were found."},{status:102,message:"Empty block file. 
This indicates the calculation of blocked pockets did not finish."},{status:110,message:"The job ran out of memory."},{status:120,message:"The job ran out of walltime."}]},class:"aiida_zeopp.calculations.network:NetworkCalculation"}},"aiida.parsers":{"zeopp.network":"aiida_zeopp.parsers.network:NetworkParser"},"aiida.data":{"zeopp.parameters":"aiida_zeopp.data.parameters:NetworkParameters"},console_scripts:{"zeopp-submit":"aiida_zeopp.console_scripts.data_cli:cli"},"aiida.workflows":{}},commits_count:0,summaryinfo:[{colorclass:"blue",text:"Calculations",count:1},{colorclass:"brown",text:"Parsers",count:1},{colorclass:"red",text:"Data",count:1},{colorclass:"purple",text:"Console scripts",count:1}],pip_install_cmd:"pip install aiida-zeopp",is_installable:"True"}},Uw=[{name:"Calculations",colorclass:"blue",num_entries:54,total_num:132},{name:"Parsers",colorclass:"brown",num_entries:55,total_num:110},{name:"Data",colorclass:"red",num_entries:29,total_num:101},{name:"Workflows",colorclass:"green",num_entries:39,total_num:130},{name:"Console scripts",colorclass:"purple",num_entries:16,total_num:27},{name:"Other",tooltip:"Aenet potentials, Calculations importers, Calculations monitors, ...",colorclass:"orange",num_entries:26,total_num:99}],Hw={planning:["Not yet ready to use. Developers welcome!","status-planning-d9644d.svg"],"pre-alpha":["Not yet ready to use. Developers welcome!","status-planning-d9644d.svg"],alpha:["Adds new functionality, not yet ready for production. Testing welcome!","status-alpha-d6af23.svg"],beta:["Adds new functionality, not yet ready for production. Testing welcome!","status-beta-d6af23.svg"],stable:["Ready for production calculations. Bug reports welcome!","status-stable-4cc61e.svg"],mature:["Ready for production calculations. Bug reports welcome!","status-stable-4cc61e.svg"],inactive:["No longer maintained.","status-inactive-bbbbbb.svg"]},Vw={"aiida.calculations":"CalcJobs and calculation functions","aiida.parsers":"CalcJob parsers","aiida.data":"Data node types","aiida.cmdline.data":"verdi data commands","aiida.groups":"Group types","aiida.workflows":"WorkChains and work functions","aiida.schedulers":"Job scheduler support","aiida.transports":"Data transport protocols","aiida.tests":"Development test modules","aiida.tools.dbexporters":"Support for exporting to external databases","aiida.tools.dbimporters":"Support for importing from external databases",console_scripts:"Console 
scripts"},Di={plugins:Ww,globalsummary:Uw,status_dict:Hw,entrypointtypes:Vw},Og="data%3Aimage%2Fpng%3Bbase64%2CiVBORw0KGgoAAAANSUhEUgAAACMAAAAhCAYAAABTERJSAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAAFhgAABYYBG6Yz4AAAABl0RVh0U29mdHdhcmUAd3d3Lmlua3NjYXBlLm9yZ5vuPBoAAAUbSURBVFiFzZhrbFRVEMd%2Fc%2B5uu6UUbIFC%2FUAUVEQCLbQJBIiBDyiImJiIhmohYNCkqJAQxASLF8tDgYRHBLXRhIcKNtFEhVDgAxBJqgmVh4JEKg3EIn2QYqBlt917xg%2BFss%2ByaDHOtzsz5z%2B%2FuZl7ztmF%2F5HJvxVQN6cPYX8%2FPLnOmsvNAvqfwuib%2FbNIk9cQeQnLcKRL5xLIV%2Fic9eJeunjPYbRs4FjQSpTB3aS1IpRKeeOOewajy%2FKKEO8Q0DuVdKy8IqsbPulxGHUfCBBu%2BwUYGuFuBTK7wQnht6PEbf4tlRomVRjCbXNjQEB0AyrFQOL5ENIJm7dTLZE6DPJCnEtFZVXDLny%2B4Sjv0PmmYu1ZdUek9RiMgoDmJ8V0L7XJqsZ3UW8YsBOwEeHeeFce7jEYXBy0m9m4BbXqSj2%2Bxnkg26MCVrN6DEZcwggtd8pTFx%2Fh3B9B50YLaFOPwXQKUt0tBLegtSomfBlfY13PwijbEnhztGzgJsK5h9W9qeWwBqjvyhB2iBs1Qz0AU974DciRGO8CVN8AJhAeMAdA3KbrKEtvxhsI%2B9emWiJlGBEU680Cfk%2BSsVqXZvcFYGXjF8ABVJ%2BTNfVXehyms1zzn1gmIOxLEB6E31%2FWBe5rnCarmo7elf7dJEeaLh80GasliI5F6Q9cAz1GY1OJVNDxTzQTw7iY%2FHEZRQY7xqJ9RU2LFe%2FYqakdP911ha0XhjjiTVAkDwgatWfCGeYocx8M3glG8g8EXhSrLrHnEFJ5Ymow%2FkhIYv6ttYUW1iFmEqqxdVoUs9FmsDYSqmtmJh3Cl1%2BVtl2s7owDUdocR5bceiyoSivGTT5vzpbzL1uoBpmcAAQgW7ArnKD9ng9rc%2BNgrobSNwpSkkhcRN%2BvmXLjIsDovYHHEfmsYFygPAnIDEQrQPzJYCOaLHLUfIt7Oq0LJn9fxkSgNCb1qEIQ5UKgT%2Fs6gJmVOOroJhQBXVqw118QtWLdyUxEP45sUpSzqP7RDdFYMyB9UReMiF1MzPwoUqHt8hjGFFeP5wZAbZ%2F0%2BcAtAAcji6LeSq%2FMYiAvSsdw3GtrfVSVFUBbIhwRWYR7yOcr%2FBi%2FB1MSJZ16JlgH1AGM3EO2QnmMyrSbTSiACgFBv4yCUapZkt9qwWVL7aeOyHvArJjm8%2Fz9BhdI4XcZgz2%2FvRALosjsk1ODOyMcJn9%2FYI6IrkS5vxMGdUwou2YKfyVqJpn5t9aNs3gbQMbdbkxnGdsr4bTHm2AxWo9yNZK4PXR3uzhAh%2BM0AZejnCrGdy0UvJxl0oMKgWSLR%2B1LH2aE9ViejiFs%2BXn6bTjng3MlIhJ1I1TkuLdg6OcAbD7Xx%2Bc3y9TrWAiSHqVkbZ2v9ilCo6s4AjwZCzFyD9mOL305nV9aonvsQeT2L0gVk4OwOJqXXVRW7naaxswDKVdlYLyMXAnntteYmws2xcVVZzq%2BtHPAooQggmJkc6TLSusOiL4RKgwzzYU1iFQgiUBA1H7E8yPau%2BZl9P7AblVNebtHqTgxLfRqrNvZWjsHZFuqMqKcDWdlFjF7UGvX8Jn24DyEAykJwNcdg0OvJ4p5pQ9tV6SMlP4A0PNh8aYze1ArROyUNTNouy8tNF3Rt0CSXb6bRFl4%2FIfQzNMjaE9WwpYOWQnOdEF%2BTdJNO0iFh7%2BI0kfORzQZb6P2kymS9oTxzBiM9rUqLWr1WE5G6ODhycQd%2FUnNVeMbcH68hYkGycNoUNWc8fxaxfwhDbHpfwM5oeTY7rUX8QAAAABJRU5ErkJggg%3D%3D";function D(){return D=Object.assign?Object.assign.bind():function(e){for(var t=1;t{t[a]=Bg(e[a])}),t}function Rt(e,t,a={clone:!0}){const i=a.clone?D({},e):e;return ri(e)&&ri(t)&&Object.keys(t).forEach(s=>{s!=="__proto__"&&(ri(t[s])&&s in e&&ri(e[s])?i[s]=Rt(e[s],t[s],a):a.clone?i[s]=ri(t[s])?Bg(t[s]):t[s]:i[s]=t[s])}),i}function Ha(e){let t="https://mui.com/production-error/?code="+e;for(let a=1;aa==null?t:function(...s){t.apply(this,s),a.apply(this,s)},()=>{})}function Ro(e,t=166){let a;function i(...s){const r=()=>{e.apply(this,s)};clearTimeout(a),a=setTimeout(r,t)}return i.clear=()=>{clearTimeout(a)},i}function Xw(e,t){return()=>null}function gn(e,t){return b.isValidElement(e)&&t.indexOf(e.type.muiName)!==-1}function ct(e){return e&&e.ownerDocument||document}function ia(e){return ct(e).defaultView||window}function Qw(e,t){return()=>null}function Qn(e,t){typeof e=="function"?e(t):e&&(e.current=t)}const Jw=typeof window<"u"?b.useLayoutEffect:b.useEffect,Va=Jw;let rf=0;function Yw(e){const[t,a]=b.useState(e),i=e||t;return b.useEffect(()=>{t==null&&(rf+=1,a(`mui-${rf}`))},[t]),i}const nf=wn["useId".toString()];function Zw(e){if(nf!==void 0){const t=nf();return e??t}return Yw(e)}function e0(e,t,a,i,s){return null}function Ku({controlled:e,default:t,name:a,state:i="value"}){const{current:s}=b.useRef(e!==void 0),[r,n]=b.useState(t),o=s?e:r,l=b.useCallback(u=>{s||n(u)},[]);return[o,l]}function ci(e){const 
t=b.useRef(e);return Va(()=>{t.current=e}),b.useCallback((...a)=>(0,t.current)(...a),[])}function Xe(...e){return b.useMemo(()=>e.every(t=>t==null)?null:t=>{e.forEach(a=>{Qn(a,t)})},e)}let Eo=!0,Gu=!1,of;const t0={text:!0,search:!0,url:!0,tel:!0,email:!0,password:!0,number:!0,date:!0,month:!0,week:!0,time:!0,datetime:!0,"datetime-local":!0};function a0(e){const{type:t,tagName:a}=e;return!!(a==="INPUT"&&t0[t]&&!e.readOnly||a==="TEXTAREA"&&!e.readOnly||e.isContentEditable)}function i0(e){e.metaKey||e.altKey||e.ctrlKey||(Eo=!0)}function Nl(){Eo=!1}function s0(){this.visibilityState==="hidden"&&Gu&&(Eo=!0)}function r0(e){e.addEventListener("keydown",i0,!0),e.addEventListener("mousedown",Nl,!0),e.addEventListener("pointerdown",Nl,!0),e.addEventListener("touchstart",Nl,!0),e.addEventListener("visibilitychange",s0,!0)}function n0(e){const{target:t}=e;try{return t.matches(":focus-visible")}catch{}return Eo||a0(t)}function $g(){const e=b.useCallback(s=>{s!=null&&r0(s.ownerDocument)},[]),t=b.useRef(!1);function a(){return t.current?(Gu=!0,window.clearTimeout(of),of=window.setTimeout(()=>{Gu=!1},100),t.current=!1,!0):!1}function i(s){return n0(s)?(t.current=!0,!0):!1}return{isFocusVisibleRef:t,onFocus:i,onBlur:a,ref:e}}function Wg(e){const t=e.documentElement.clientWidth;return Math.abs(window.innerWidth-t)}function Ug(e,t){const a=D({},t);return Object.keys(e).forEach(i=>{if(i.toString().match(/^(components|slots)$/))a[i]=D({},e[i],a[i]);else if(i.toString().match(/^(componentsProps|slotProps)$/)){const s=e[i]||{},r=t[i];a[i]={},!r||!Object.keys(r)?a[i]=s:!s||!Object.keys(s)?a[i]=r:(a[i]=D({},r),Object.keys(s).forEach(n=>{a[i][n]=Ug(s[n],r[n])}))}else a[i]===void 0&&(a[i]=e[i])}),a}function Fe(e,t,a=void 0){const i={};return Object.keys(e).forEach(s=>{i[s]=e[s].reduce((r,n)=>{if(n){const o=t(n);o!==""&&r.push(o),a&&a[n]&&r.push(a[n])}return r},[]).join(" ")}),i}const lf=e=>e,o0=()=>{let e=lf;return{configure(t){e=t},generate(t){return e(t)},reset(){e=lf}}},l0=o0(),pc=l0,u0={active:"active",checked:"checked",completed:"completed",disabled:"disabled",readOnly:"readOnly",error:"error",expanded:"expanded",focused:"focused",focusVisible:"focusVisible",required:"required",selected:"selected"};function Pe(e,t,a="Mui"){const i=u0[t];return i?`${a}-${i}`:`${pc.generate(e)}-${t}`}function Te(e,t,a="Mui"){const i={};return t.forEach(s=>{i[s]=Pe(e,s,a)}),i}function Hg(e){var t=Object.create(null);return function(a){return t[a]===void 0&&(t[a]=e(a)),t[a]}}var 
d0=/^((children|dangerouslySetInnerHTML|key|ref|autoFocus|defaultValue|defaultChecked|innerHTML|suppressContentEditableWarning|suppressHydrationWarning|valueLink|abbr|accept|acceptCharset|accessKey|action|allow|allowUserMedia|allowPaymentRequest|allowFullScreen|allowTransparency|alt|async|autoComplete|autoPlay|capture|cellPadding|cellSpacing|challenge|charSet|checked|cite|classID|className|cols|colSpan|content|contentEditable|contextMenu|controls|controlsList|coords|crossOrigin|data|dateTime|decoding|default|defer|dir|disabled|disablePictureInPicture|download|draggable|encType|enterKeyHint|form|formAction|formEncType|formMethod|formNoValidate|formTarget|frameBorder|headers|height|hidden|high|href|hrefLang|htmlFor|httpEquiv|id|inputMode|integrity|is|keyParams|keyType|kind|label|lang|list|loading|loop|low|marginHeight|marginWidth|max|maxLength|media|mediaGroup|method|min|minLength|multiple|muted|name|nonce|noValidate|open|optimum|pattern|placeholder|playsInline|poster|preload|profile|radioGroup|readOnly|referrerPolicy|rel|required|reversed|role|rows|rowSpan|sandbox|scope|scoped|scrolling|seamless|selected|shape|size|sizes|slot|span|spellCheck|src|srcDoc|srcLang|srcSet|start|step|style|summary|tabIndex|target|title|translate|type|useMap|value|width|wmode|wrap|about|datatype|inlist|prefix|property|resource|typeof|vocab|autoCapitalize|autoCorrect|autoSave|color|incremental|fallback|inert|itemProp|itemScope|itemType|itemID|itemRef|on|option|results|security|unselectable|accentHeight|accumulate|additive|alignmentBaseline|allowReorder|alphabetic|amplitude|arabicForm|ascent|attributeName|attributeType|autoReverse|azimuth|baseFrequency|baselineShift|baseProfile|bbox|begin|bias|by|calcMode|capHeight|clip|clipPathUnits|clipPath|clipRule|colorInterpolation|colorInterpolationFilters|colorProfile|colorRendering|contentScriptType|contentStyleType|cursor|cx|cy|d|decelerate|descent|diffuseConstant|direction|display|divisor|dominantBaseline|dur|dx|dy|edgeMode|elevation|enableBackground|end|exponent|externalResourcesRequired|fill|fillOpacity|fillRule|filter|filterRes|filterUnits|floodColor|floodOpacity|focusable|fontFamily|fontSize|fontSizeAdjust|fontStretch|fontStyle|fontVariant|fontWeight|format|from|fr|fx|fy|g1|g2|glyphName|glyphOrientationHorizontal|glyphOrientationVertical|glyphRef|gradientTransform|gradientUnits|hanging|horizAdvX|horizOriginX|ideographic|imageRendering|in|in2|intercept|k|k1|k2|k3|k4|kernelMatrix|kernelUnitLength|kerning|keyPoints|keySplines|keyTimes|lengthAdjust|letterSpacing|lightingColor|limitingConeAngle|local|markerEnd|markerMid|markerStart|markerHeight|markerUnits|markerWidth|mask|maskContentUnits|maskUnits|mathematical|mode|numOctaves|offset|opacity|operator|order|orient|orientation|origin|overflow|overlinePosition|overlineThickness|panose1|paintOrder|pathLength|patternContentUnits|patternTransform|patternUnits|pointerEvents|points|pointsAtX|pointsAtY|pointsAtZ|preserveAlpha|preserveAspectRatio|primitiveUnits|r|radius|refX|refY|renderingIntent|repeatCount|repeatDur|requiredExtensions|requiredFeatures|restart|result|rotate|rx|ry|scale|seed|shapeRendering|slope|spacing|specularConstant|specularExponent|speed|spreadMethod|startOffset|stdDeviation|stemh|stemv|stitchTiles|stopColor|stopOpacity|strikethroughPosition|strikethroughThickness|string|stroke|strokeDasharray|strokeDashoffset|strokeLinecap|strokeLinejoin|strokeMiterlimit|strokeOpacity|strokeWidth|surfaceScale|systemLanguage|tableValues|targetX|targetY|textAnchor|textDecoration|textRendering|textLength|to|transform|u1|u2|underlin
ePosition|underlineThickness|unicode|unicodeBidi|unicodeRange|unitsPerEm|vAlphabetic|vHanging|vIdeographic|vMathematical|values|vectorEffect|version|vertAdvY|vertOriginX|vertOriginY|viewBox|viewTarget|visibility|widths|wordSpacing|writingMode|x|xHeight|x1|x2|xChannelSelector|xlinkActuate|xlinkArcrole|xlinkHref|xlinkRole|xlinkShow|xlinkTitle|xlinkType|xmlBase|xmlns|xmlnsXlink|xmlLang|xmlSpace|y|y1|y2|yChannelSelector|z|zoomAndPan|for|class|autofocus)|(([Dd][Aa][Tt][Aa]|[Aa][Rr][Ii][Aa]|x)-.*))$/,c0=Hg(function(e){return d0.test(e)||e.charCodeAt(0)===111&&e.charCodeAt(1)===110&&e.charCodeAt(2)<91});function p0(e){if(e.sheet)return e.sheet;for(var t=0;t0?Ve(ys,--pt):0,ls--,Re===10&&(ls=1,Fo--),Re}function _t(){return Re=pt2||_r(Re)>3?"":" "}function D0(e,t){for(;--t&&_t()&&!(Re<48||Re>102||Re>57&&Re<65||Re>70&&Re<97););return Cr(e,yn()+(t<6&&aa()==32&&_t()==32))}function Qu(e){for(;_t();)switch(Re){case e:return pt;case 34:case 39:e!==34&&e!==39&&Qu(Re);break;case 40:e===41&&Qu(e);break;case 92:_t();break}return pt}function k0(e,t){for(;_t()&&e+Re!==47+10;)if(e+Re===42+42&&aa()===47)break;return"/*"+Cr(t,pt-1)+"*"+jo(e===47?e:_t())}function S0(e){for(;!_r(aa());)_t();return Cr(e,pt)}function C0(e){return Jg(vn("",null,null,null,[""],e=Qg(e),0,[0],e))}function vn(e,t,a,i,s,r,n,o,l){for(var u=0,d=0,f=n,g=0,c=0,m=0,p=1,q=1,y=1,h=0,_="",w=s,x=r,k=i,T=_;q;)switch(m=h,h=_t()){case 40:if(m!=108&&Ve(T,f-1)==58){Xu(T+=te(_n(h),"&","&\f"),"&\f")!=-1&&(y=-1);break}case 34:case 39:case 91:T+=_n(h);break;case 9:case 10:case 13:case 32:T+=q0(m);break;case 92:T+=D0(yn()-1,7);continue;case 47:switch(aa()){case 42:case 47:Zr(P0(k0(_t(),yn()),t,a),l);break;default:T+="/"}break;case 123*p:o[u++]=Xt(T)*y;case 125*p:case 59:case 0:switch(h){case 0:case 125:q=0;case 59+d:y==-1&&(T=te(T,/\f/g,"")),c>0&&Xt(T)-f&&Zr(c>32?df(T+";",i,a,f-1):df(te(T," ","")+";",i,a,f-2),l);break;case 59:T+=";";default:if(Zr(k=uf(T,t,a,u,d,s,o,_,w=[],x=[],f),r),h===123)if(d===0)vn(T,t,k,k,w,r,f,o,x);else switch(g===99&&Ve(T,3)===110?100:g){case 100:case 108:case 109:case 115:vn(e,k,k,i&&Zr(uf(e,k,k,0,0,s,o,_,s,w=[],f),x),s,x,f,o,i?w:x);break;default:vn(T,k,k,k,[""],x,0,o,x)}}u=d=c=0,p=y=1,_=T="",f=n;break;case 58:f=1+Xt(T),c=m;default:if(p<1){if(h==123)--p;else if(h==125&&p++==0&&x0()==125)continue}switch(T+=jo(h),h*p){case 38:y=d>0?1:(T+="\f",-1);break;case 44:o[u++]=(Xt(T)-1)*y,y=1;break;case 64:aa()===45&&(T+=_n(_t())),g=aa(),d=f=Xt(_=T+=S0(yn())),h++;break;case 45:m===45&&Xt(T)==2&&(p=0)}}return r}function uf(e,t,a,i,s,r,n,o,l,u,d){for(var f=s-1,g=s===0?r:[""],c=hc(g),m=0,p=0,q=0;m0?g[y]+" "+h:te(h,/&\f/g,g[y])))&&(l[q++]=_);return No(e,t,a,s===0?fc:o,l,u,d)}function P0(e,t,a){return No(e,t,a,Vg,jo(T0()),yr(e,2,-2),0)}function df(e,t,a,i){return No(e,t,a,mc,yr(e,0,i),yr(e,i+1,-1),i)}function Zi(e,t){for(var a="",i=hc(e),s=0;s6)switch(Ve(e,t+1)){case 109:if(Ve(e,t+4)!==45)break;case 102:return te(e,/(.+:)(.+)-([^]+)/,"$1"+ee+"$2-$3$1"+Jn+(Ve(e,t+3)==108?"$3":"$2-$3"))+e;case 115:return~Xu(e,"stretch")?Yg(te(e,"stretch","fill-available"),t)+e:e}break;case 4949:if(Ve(e,t+1)!==115)break;case 6444:switch(Ve(e,Xt(e)-3-(~Xu(e,"!important")&&10))){case 107:return te(e,":",":"+ee)+e;case 101:return te(e,/(.+:)([^;!]+)(;|!.+)?/,"$1"+ee+(Ve(e,14)===45?"inline-":"")+"box$3$1"+ee+"$2$3$1"+Ye+"$2box$3")+e}break;case 5936:switch(Ve(e,t+11)){case 114:return ee+e+Ye+te(e,/[svh]\w+-[tblr]{2}/,"tb")+e;case 108:return ee+e+Ye+te(e,/[svh]\w+-[tblr]{2}/,"tb-rl")+e;case 45:return ee+e+Ye+te(e,/[svh]\w+-[tblr]{2}/,"lr")+e}return ee+e+Ye+e+e}return e}var 
M0=function(t,a,i,s){if(t.length>-1&&!t.return)switch(t.type){case mc:t.return=Yg(t.value,t.length);break;case Kg:return Zi([Ss(t,{value:te(t.value,"@","@"+ee)})],s);case fc:if(t.length)return w0(t.props,function(r){switch(b0(r,/(::plac\w+|:read-\w+)/)){case":read-only":case":read-write":return Zi([Ss(t,{props:[te(r,/:(read-\w+)/,":"+Jn+"$1")]})],s);case"::placeholder":return Zi([Ss(t,{props:[te(r,/:(plac\w+)/,":"+ee+"input-$1")]}),Ss(t,{props:[te(r,/:(plac\w+)/,":"+Jn+"$1")]}),Ss(t,{props:[te(r,/:(plac\w+)/,Ye+"input-$1")]})],s)}return""})}},O0=[M0],B0=function(t){var a=t.key;if(a==="css"){var i=document.querySelectorAll("style[data-emotion]:not([data-s])");Array.prototype.forEach.call(i,function(p){var q=p.getAttribute("data-emotion");q.indexOf(" ")!==-1&&(document.head.appendChild(p),p.setAttribute("data-s",""))})}var s=t.stylisPlugins||O0,r={},n,o=[];n=t.container||document.head,Array.prototype.forEach.call(document.querySelectorAll('style[data-emotion^="'+a+' "]'),function(p){for(var q=p.getAttribute("data-emotion").split(" "),y=1;y=4;++i,s-=4)a=e.charCodeAt(i)&255|(e.charCodeAt(++i)&255)<<8|(e.charCodeAt(++i)&255)<<16|(e.charCodeAt(++i)&255)<<24,a=(a&65535)*1540483477+((a>>>16)*59797<<16),a^=a>>>24,t=(a&65535)*1540483477+((a>>>16)*59797<<16)^(t&65535)*1540483477+((t>>>16)*59797<<16);switch(s){case 3:t^=(e.charCodeAt(i+2)&255)<<16;case 2:t^=(e.charCodeAt(i+1)&255)<<8;case 1:t^=e.charCodeAt(i)&255,t=(t&65535)*1540483477+((t>>>16)*59797<<16)}return t^=t>>>13,t=(t&65535)*1540483477+((t>>>16)*59797<<16),((t^t>>>15)>>>0).toString(36)}var Y0={animationIterationCount:1,aspectRatio:1,borderImageOutset:1,borderImageSlice:1,borderImageWidth:1,boxFlex:1,boxFlexGroup:1,boxOrdinalGroup:1,columnCount:1,columns:1,flex:1,flexGrow:1,flexPositive:1,flexShrink:1,flexNegative:1,flexOrder:1,gridRow:1,gridRowEnd:1,gridRowSpan:1,gridRowStart:1,gridColumn:1,gridColumnEnd:1,gridColumnSpan:1,gridColumnStart:1,msGridRow:1,msGridRowSpan:1,msGridColumn:1,msGridColumnSpan:1,fontWeight:1,lineHeight:1,opacity:1,order:1,orphans:1,tabSize:1,widows:1,zIndex:1,zoom:1,WebkitLineClamp:1,fillOpacity:1,floodOpacity:1,stopOpacity:1,strokeDasharray:1,strokeDashoffset:1,strokeMiterlimit:1,strokeOpacity:1,strokeWidth:1},Z0=/[A-Z]|^ms/g,e1=/_EMO_([^_]+?)_([^]*?)_EMO_/g,ry=function(t){return t.charCodeAt(1)===45},pf=function(t){return t!=null&&typeof t!="boolean"},Ll=Hg(function(e){return ry(e)?e:e.replace(Z0,"-$&").toLowerCase()}),ff=function(t,a){switch(t){case"animation":case"animationName":if(typeof a=="string")return a.replace(e1,function(i,s,r){return Qt={name:s,styles:r,next:Qt},s})}return Y0[t]!==1&&!ry(t)&&typeof a=="number"&&a!==0?a+"px":a};function vr(e,t,a){if(a==null)return"";if(a.__emotion_styles!==void 0)return a;switch(typeof a){case"boolean":return"";case"object":{if(a.anim===1)return Qt={name:a.name,styles:a.styles,next:Qt},a.name;if(a.styles!==void 0){var i=a.next;if(i!==void 0)for(;i!==void 0;)Qt={name:i.name,styles:i.styles,next:Qt},i=i.next;var s=a.styles+";";return s}return t1(e,t,a)}case"function":{if(e!==void 0){var r=Qt,n=a(e);return Qt=r,vr(e,t,n)}break}}if(t==null)return a;var o=t[a];return o!==void 0?o:a}function t1(e,t,a){var i="";if(Array.isArray(a))for(var s=0;s96?n1:o1},yf=function(t,a,i){var s;if(a){var r=a.shouldForwardProp;s=t.__emotion_forwardProp&&r?function(n){return t.__emotion_forwardProp(n)&&r(n)}:r}return typeof s!="function"&&i&&(s=t.__emotion_forwardProp),s},l1=function(t){var a=t.cache,i=t.serialized,s=t.isStringTag;return iy(a,i,s),i1(function(){return sy(a,i,s)}),null},u1=function 
e(t,a){var i=t.__emotion_real===t,s=i&&t.__emotion_base||t,r,n;a!==void 0&&(r=a.label,n=a.target);var o=yf(t,a,i),l=o||gf(s),u=!l("as");return function(){var d=arguments,f=i&&t.__emotion_styles!==void 0?t.__emotion_styles.slice(0):[];if(r!==void 0&&f.push("label:"+r+";"),d[0]==null||d[0].raw===void 0)f.push.apply(f,d);else{f.push(d[0][0]);for(var g=d.length,c=1;ct(c1(s)?a:s):t;return v.jsx(s1,{styles:i})}/** + * @mui/styled-engine v5.13.2 + * + * @license MIT + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */function uy(e,t){return Ju(e,t)}const f1=(e,t)=>{Array.isArray(e.__emotion_styles)&&(e.__emotion_styles=t(e.__emotion_styles))};function U(e,t){if(e==null)return{};var a={},i=Object.keys(e),s,r;for(r=0;r=0)&&(a[s]=e[s]);return a}const m1=["values","unit","step"],h1=e=>{const t=Object.keys(e).map(a=>({key:a,val:e[a]}))||[];return t.sort((a,i)=>a.val-i.val),t.reduce((a,i)=>D({},a,{[i.key]:i.val}),{})};function g1(e){const{values:t={xs:0,sm:600,md:900,lg:1200,xl:1536},unit:a="px",step:i=5}=e,s=U(e,m1),r=h1(t),n=Object.keys(r);function o(g){return`@media (min-width:${typeof t[g]=="number"?t[g]:g}${a})`}function l(g){return`@media (max-width:${(typeof t[g]=="number"?t[g]:g)-i/100}${a})`}function u(g,c){const m=n.indexOf(c);return`@media (min-width:${typeof t[g]=="number"?t[g]:g}${a}) and (max-width:${(m!==-1&&typeof t[n[m]]=="number"?t[n[m]]:c)-i/100}${a})`}function d(g){return n.indexOf(g)+1`@media (min-width:${Tc[e]}px)`};function ya(e,t,a){const i=e.theme||{};if(Array.isArray(t)){const r=i.breakpoints||_f;return t.reduce((n,o,l)=>(n[r.up(r.keys[l])]=a(t[l]),n),{})}if(typeof t=="object"){const r=i.breakpoints||_f;return Object.keys(t).reduce((n,o)=>{if(Object.keys(r.values||Tc).indexOf(o)!==-1){const l=r.up(o);n[l]=a(t[o],o)}else{const l=o;n[l]=t[l]}return n},{})}return a(t)}function v1(e={}){var t;return((t=e.keys)==null?void 0:t.reduce((i,s)=>{const r=e.up(s);return i[r]={},i},{}))||{}}function b1(e,t){return e.reduce((a,i)=>{const s=a[i];return(!s||Object.keys(s).length===0)&&delete a[i],a},t)}function Ko(e,t,a=!0){if(!t||typeof t!="string")return null;if(e&&e.vars&&a){const i=`vars.${t}`.split(".").reduce((s,r)=>s&&s[r]?s[r]:null,e);if(i!=null)return i}return t.split(".").reduce((i,s)=>i&&i[s]!=null?i[s]:null,e)}function Yn(e,t,a,i=a){let s;return typeof e=="function"?s=e(a):Array.isArray(e)?s=e[a]||i:s=Ko(e,a)||i,t&&(s=t(s,i,e)),s}function ae(e){const{prop:t,cssProperty:a=e.prop,themeKey:i,transform:s}=e,r=n=>{if(n[t]==null)return null;const o=n[t],l=n.theme,u=Ko(l,i)||{};return ya(n,o,f=>{let g=Yn(u,s,f);return f===g&&typeof f=="string"&&(g=Yn(u,s,`${t}${f==="default"?"":be(f)}`,f)),a===!1?g:{[a]:g}})};return r.propTypes={},r.filterProps=[t],r}function w1(e){const t={};return a=>(t[a]===void 0&&(t[a]=e(a)),t[a])}const T1={m:"margin",p:"padding"},x1={t:"Top",r:"Right",b:"Bottom",l:"Left",x:["Left","Right"],y:["Top","Bottom"]},vf={marginX:"mx",marginY:"my",paddingX:"px",paddingY:"py"},q1=w1(e=>{if(e.length>2)if(vf[e])e=vf[e];else return[e];const[t,a]=e.split(""),i=T1[t],s=x1[a]||"";return 
Array.isArray(s)?s.map(r=>i+r):[i+s]}),xc=["m","mt","mr","mb","ml","mx","my","margin","marginTop","marginRight","marginBottom","marginLeft","marginX","marginY","marginInline","marginInlineStart","marginInlineEnd","marginBlock","marginBlockStart","marginBlockEnd"],qc=["p","pt","pr","pb","pl","px","py","padding","paddingTop","paddingRight","paddingBottom","paddingLeft","paddingX","paddingY","paddingInline","paddingInlineStart","paddingInlineEnd","paddingBlock","paddingBlockStart","paddingBlockEnd"];[...xc,...qc];function Pr(e,t,a,i){var s;const r=(s=Ko(e,t,!1))!=null?s:a;return typeof r=="number"?n=>typeof n=="string"?n:r*n:Array.isArray(r)?n=>typeof n=="string"?n:r[n]:typeof r=="function"?r:()=>{}}function dy(e){return Pr(e,"spacing",8)}function Ar(e,t){if(typeof t=="string"||t==null)return t;const a=Math.abs(t),i=e(a);return t>=0?i:typeof i=="number"?-i:`-${i}`}function D1(e,t){return a=>e.reduce((i,s)=>(i[s]=Ar(t,a),i),{})}function k1(e,t,a,i){if(t.indexOf(a)===-1)return null;const s=q1(a),r=D1(s,i),n=e[a];return ya(e,n,r)}function cy(e,t){const a=dy(e.theme);return Object.keys(e).map(i=>k1(e,t,i,a)).reduce(Xs,{})}function De(e){return cy(e,xc)}De.propTypes={};De.filterProps=xc;function ke(e){return cy(e,qc)}ke.propTypes={};ke.filterProps=qc;function S1(e=8){if(e.mui)return e;const t=dy({spacing:e}),a=(...i)=>(i.length===0?[1]:i).map(r=>{const n=t(r);return typeof n=="number"?`${n}px`:n}).join(" ");return a.mui=!0,a}function Go(...e){const t=e.reduce((i,s)=>(s.filterProps.forEach(r=>{i[r]=s}),i),{}),a=i=>Object.keys(i).reduce((s,r)=>t[r]?Xs(s,t[r](i)):s,{});return a.propTypes={},a.filterProps=e.reduce((i,s)=>i.concat(s.filterProps),[]),a}function Yt(e){return typeof e!="number"?e:`${e}px solid`}const C1=ae({prop:"border",themeKey:"borders",transform:Yt}),P1=ae({prop:"borderTop",themeKey:"borders",transform:Yt}),A1=ae({prop:"borderRight",themeKey:"borders",transform:Yt}),I1=ae({prop:"borderBottom",themeKey:"borders",transform:Yt}),R1=ae({prop:"borderLeft",themeKey:"borders",transform:Yt}),E1=ae({prop:"borderColor",themeKey:"palette"}),j1=ae({prop:"borderTopColor",themeKey:"palette"}),F1=ae({prop:"borderRightColor",themeKey:"palette"}),N1=ae({prop:"borderBottomColor",themeKey:"palette"}),L1=ae({prop:"borderLeftColor",themeKey:"palette"}),Xo=e=>{if(e.borderRadius!==void 0&&e.borderRadius!==null){const t=Pr(e.theme,"shape.borderRadius",4),a=i=>({borderRadius:Ar(t,i)});return ya(e,e.borderRadius,a)}return null};Xo.propTypes={};Xo.filterProps=["borderRadius"];Go(C1,P1,A1,I1,R1,E1,j1,F1,N1,L1,Xo);const Qo=e=>{if(e.gap!==void 0&&e.gap!==null){const t=Pr(e.theme,"spacing",8),a=i=>({gap:Ar(t,i)});return ya(e,e.gap,a)}return null};Qo.propTypes={};Qo.filterProps=["gap"];const Jo=e=>{if(e.columnGap!==void 0&&e.columnGap!==null){const t=Pr(e.theme,"spacing",8),a=i=>({columnGap:Ar(t,i)});return ya(e,e.columnGap,a)}return null};Jo.propTypes={};Jo.filterProps=["columnGap"];const Yo=e=>{if(e.rowGap!==void 0&&e.rowGap!==null){const t=Pr(e.theme,"spacing",8),a=i=>({rowGap:Ar(t,i)});return ya(e,e.rowGap,a)}return null};Yo.propTypes={};Yo.filterProps=["rowGap"];const M1=ae({prop:"gridColumn"}),O1=ae({prop:"gridRow"}),B1=ae({prop:"gridAutoFlow"}),z1=ae({prop:"gridAutoColumns"}),$1=ae({prop:"gridAutoRows"}),W1=ae({prop:"gridTemplateColumns"}),U1=ae({prop:"gridTemplateRows"}),H1=ae({prop:"gridTemplateAreas"}),V1=ae({prop:"gridArea"});Go(Qo,Jo,Yo,M1,O1,B1,z1,$1,W1,U1,H1,V1);function es(e,t){return t==="grey"?t:e}const 
K1=ae({prop:"color",themeKey:"palette",transform:es}),G1=ae({prop:"bgcolor",cssProperty:"backgroundColor",themeKey:"palette",transform:es}),X1=ae({prop:"backgroundColor",themeKey:"palette",transform:es});Go(K1,G1,X1);function ht(e){return e<=1&&e!==0?`${e*100}%`:e}const Q1=ae({prop:"width",transform:ht}),Dc=e=>{if(e.maxWidth!==void 0&&e.maxWidth!==null){const t=a=>{var i;return{maxWidth:((i=e.theme)==null||(i=i.breakpoints)==null||(i=i.values)==null?void 0:i[a])||Tc[a]||ht(a)}};return ya(e,e.maxWidth,t)}return null};Dc.filterProps=["maxWidth"];const J1=ae({prop:"minWidth",transform:ht}),Y1=ae({prop:"height",transform:ht}),Z1=ae({prop:"maxHeight",transform:ht}),eT=ae({prop:"minHeight",transform:ht});ae({prop:"size",cssProperty:"width",transform:ht});ae({prop:"size",cssProperty:"height",transform:ht});const tT=ae({prop:"boxSizing"});Go(Q1,Dc,J1,Y1,Z1,eT,tT);const aT={border:{themeKey:"borders",transform:Yt},borderTop:{themeKey:"borders",transform:Yt},borderRight:{themeKey:"borders",transform:Yt},borderBottom:{themeKey:"borders",transform:Yt},borderLeft:{themeKey:"borders",transform:Yt},borderColor:{themeKey:"palette"},borderTopColor:{themeKey:"palette"},borderRightColor:{themeKey:"palette"},borderBottomColor:{themeKey:"palette"},borderLeftColor:{themeKey:"palette"},borderRadius:{themeKey:"shape.borderRadius",style:Xo},color:{themeKey:"palette",transform:es},bgcolor:{themeKey:"palette",cssProperty:"backgroundColor",transform:es},backgroundColor:{themeKey:"palette",transform:es},p:{style:ke},pt:{style:ke},pr:{style:ke},pb:{style:ke},pl:{style:ke},px:{style:ke},py:{style:ke},padding:{style:ke},paddingTop:{style:ke},paddingRight:{style:ke},paddingBottom:{style:ke},paddingLeft:{style:ke},paddingX:{style:ke},paddingY:{style:ke},paddingInline:{style:ke},paddingInlineStart:{style:ke},paddingInlineEnd:{style:ke},paddingBlock:{style:ke},paddingBlockStart:{style:ke},paddingBlockEnd:{style:ke},m:{style:De},mt:{style:De},mr:{style:De},mb:{style:De},ml:{style:De},mx:{style:De},my:{style:De},margin:{style:De},marginTop:{style:De},marginRight:{style:De},marginBottom:{style:De},marginLeft:{style:De},marginX:{style:De},marginY:{style:De},marginInline:{style:De},marginInlineStart:{style:De},marginInlineEnd:{style:De},marginBlock:{style:De},marginBlockStart:{style:De},marginBlockEnd:{style:De},displayPrint:{cssProperty:!1,transform:e=>({"@media print":{display:e}})},display:{},overflow:{},textOverflow:{},visibility:{},whiteSpace:{},flexBasis:{},flexDirection:{},flexWrap:{},justifyContent:{},alignItems:{},alignContent:{},order:{},flex:{},flexGrow:{},flexShrink:{},alignSelf:{},justifyItems:{},justifySelf:{},gap:{style:Qo},rowGap:{style:Yo},columnGap:{style:Jo},gridColumn:{},gridRow:{},gridAutoFlow:{},gridAutoColumns:{},gridAutoRows:{},gridTemplateColumns:{},gridTemplateRows:{},gridTemplateAreas:{},gridArea:{},position:{},zIndex:{themeKey:"zIndex"},top:{},right:{},bottom:{},left:{},boxShadow:{themeKey:"shadows"},width:{transform:ht},maxWidth:{style:Dc},minWidth:{transform:ht},height:{transform:ht},maxHeight:{transform:ht},minHeight:{transform:ht},boxSizing:{},fontFamily:{themeKey:"typography"},fontSize:{themeKey:"typography"},fontStyle:{themeKey:"typography"},fontWeight:{themeKey:"typography"},letterSpacing:{},textTransform:{},lineHeight:{},textAlign:{},typography:{cssProperty:!1,themeKey:"typography"}},Zo=aT;function iT(...e){const t=e.reduce((i,s)=>i.concat(Object.keys(s)),[]),a=new Set(t);return e.every(i=>a.size===Object.keys(i).length)}function sT(e,t){return typeof e=="function"?e(t):e}function rT(){function 
e(a,i,s,r){const n={[a]:i,theme:s},o=r[a];if(!o)return{[a]:i};const{cssProperty:l=a,themeKey:u,transform:d,style:f}=o;if(i==null)return null;if(u==="typography"&&i==="inherit")return{[a]:i};const g=Ko(s,u)||{};return f?f(n):ya(n,i,m=>{let p=Yn(g,d,m);return m===p&&typeof m=="string"&&(p=Yn(g,d,`${a}${m==="default"?"":be(m)}`,m)),l===!1?p:{[l]:p}})}function t(a){var i;const{sx:s,theme:r={}}=a||{};if(!s)return null;const n=(i=r.unstable_sxConfig)!=null?i:Zo;function o(l){let u=l;if(typeof l=="function")u=l(r);else if(typeof l!="object")return l;if(!u)return null;const d=v1(r.breakpoints),f=Object.keys(d);let g=d;return Object.keys(u).forEach(c=>{const m=sT(u[c],r);if(m!=null)if(typeof m=="object")if(n[c])g=Xs(g,e(c,m,r,n));else{const p=ya({theme:r},m,q=>({[c]:q}));iT(p,m)?g[c]=t({sx:m,theme:r}):g=Xs(g,p)}else g=Xs(g,e(c,m,r,n))}),b1(f,g)}return Array.isArray(s)?s.map(o):o(s)}return t}const py=rT();py.filterProps=["sx"];const el=py,nT=["breakpoints","palette","spacing","shape"];function kc(e={},...t){const{breakpoints:a={},palette:i={},spacing:s,shape:r={}}=e,n=U(e,nT),o=g1(a),l=S1(s);let u=Rt({breakpoints:o,direction:"ltr",components:{},palette:D({mode:"light"},i),spacing:l,shape:D({},_1,r)},n);return u=t.reduce((d,f)=>Rt(d,f),u),u.unstable_sxConfig=D({},Zo,n==null?void 0:n.unstable_sxConfig),u.unstable_sx=function(f){return el({sx:f,theme:this})},u}function oT(e){return Object.keys(e).length===0}function lT(e=null){const t=b.useContext(bc);return!t||oT(t)?e:t}const uT=kc();function tl(e=uT){return lT(e)}function dT({styles:e,themeId:t,defaultTheme:a={}}){const i=tl(a),s=typeof e=="function"?e(t&&i[t]||i):e;return v.jsx(p1,{styles:s})}const cT=["sx"],pT=e=>{var t,a;const i={systemProps:{},otherProps:{}},s=(t=e==null||(a=e.theme)==null?void 0:a.unstable_sxConfig)!=null?t:Zo;return Object.keys(e).forEach(r=>{s[r]?i.systemProps[r]=e[r]:i.otherProps[r]=e[r]}),i};function fT(e){const{sx:t}=e,a=U(e,cT),{systemProps:i,otherProps:s}=pT(a);let r;return Array.isArray(t)?r=[i,...t]:typeof t=="function"?r=(...n)=>{const o=t(...n);return ri(o)?D({},i,o):i}:r=D({},i,t),D({},s,{sx:r})}function fy(e){var t,a,i="";if(typeof e=="string"||typeof e=="number")i+=e;else if(typeof e=="object")if(Array.isArray(e))for(t=0;to!=="theme"&&o!=="sx"&&o!=="as"})(el);return b.forwardRef(function(l,u){const d=tl(a),f=fT(l),{className:g,component:c="div"}=f,m=U(f,mT);return v.jsx(r,D({as:c,ref:u,className:Q(g,s?s(i):i),theme:t&&d[t]||d},m))})}const gT=["variant"];function bf(e){return e.length===0}function my(e){const{variant:t}=e,a=U(e,gT);let i=t||"";return Object.keys(a).sort().forEach(s=>{s==="color"?i+=bf(i)?e[s]:be(e[s]):i+=`${bf(i)?s:be(s)}${be(e[s].toString())}`}),i}const yT=["name","slot","skipVariantsResolver","skipSx","overridesResolver"];function _T(e){return Object.keys(e).length===0}function vT(e){return typeof e=="string"&&e.charCodeAt(0)>96}const bT=(e,t)=>t.components&&t.components[e]&&t.components[e].styleOverrides?t.components[e].styleOverrides:null,wT=(e,t)=>{let a=[];t&&t.components&&t.components[e]&&t.components[e].variants&&(a=t.components[e].variants);const i={};return a.forEach(s=>{const r=my(s.props);i[r]=s.style}),i},TT=(e,t,a,i)=>{var s;const{ownerState:r={}}=e,n=[],o=a==null||(s=a.components)==null||(s=s[i])==null?void 0:s.variants;return o&&o.forEach(l=>{let u=!0;Object.keys(l.props).forEach(d=>{r[d]!==l.props[d]&&e[d]!==l.props[d]&&(u=!1)}),u&&n.push(t[my(l.props)])}),n};function Qs(e){return e!=="ownerState"&&e!=="theme"&&e!=="sx"&&e!=="as"}const 
xT=kc(),qT=e=>e&&e.charAt(0).toLowerCase()+e.slice(1);function Cs({defaultTheme:e,theme:t,themeId:a}){return _T(t)?e:t[a]||t}function DT(e){return e?(t,a)=>a[e]:null}function kT(e={}){const{themeId:t,defaultTheme:a=xT,rootShouldForwardProp:i=Qs,slotShouldForwardProp:s=Qs}=e,r=n=>el(D({},n,{theme:Cs(D({},n,{defaultTheme:a,themeId:t}))}));return r.__mui_systemSx=!0,(n,o={})=>{f1(n,w=>w.filter(x=>!(x!=null&&x.__mui_systemSx)));const{name:l,slot:u,skipVariantsResolver:d,skipSx:f,overridesResolver:g=DT(qT(u))}=o,c=U(o,yT),m=d!==void 0?d:u&&u!=="Root"&&u!=="root"||!1,p=f||!1;let q,y=Qs;u==="Root"||u==="root"?y=i:u?y=s:vT(n)&&(y=void 0);const h=uy(n,D({shouldForwardProp:y,label:q},c)),_=(w,...x)=>{const k=x?x.map(P=>typeof P=="function"&&P.__emotion_real!==P?j=>P(D({},j,{theme:Cs(D({},j,{defaultTheme:a,themeId:t}))})):P):[];let T=w;l&&g&&k.push(P=>{const j=Cs(D({},P,{defaultTheme:a,themeId:t})),O=bT(l,j);if(O){const z={};return Object.entries(O).forEach(([R,F])=>{z[R]=typeof F=="function"?F(D({},P,{theme:j})):F}),g(P,z)}return null}),l&&!m&&k.push(P=>{const j=Cs(D({},P,{defaultTheme:a,themeId:t}));return TT(P,wT(l,j),j,l)}),p||k.push(r);const S=k.length-x.length;if(Array.isArray(w)&&S>0){const P=new Array(S).fill("");T=[...w,...P],T.raw=[...w.raw,...P]}else typeof w=="function"&&w.__emotion_real!==w&&(T=P=>w(D({},P,{theme:Cs(D({},P,{defaultTheme:a,themeId:t}))})));const A=h(T,...k);return n.muiName&&(A.muiName=n.muiName),A};return h.withConfig&&(_.withConfig=h.withConfig),_}}function ST(e){const{theme:t,name:a,props:i}=e;return!t||!t.components||!t.components[a]||!t.components[a].defaultProps?i:Ug(t.components[a].defaultProps,i)}function CT({props:e,name:t,defaultTheme:a,themeId:i}){let s=tl(a);return i&&(s=s[i]||s),ST({theme:s,name:t,props:e})}function Sc(e,t=0,a=1){return Math.min(Math.max(t,e),a)}function PT(e){e=e.slice(1);const t=new RegExp(`.{1,${e.length>=6?2:1}}`,"g");let a=e.match(t);return a&&a[0].length===1&&(a=a.map(i=>i+i)),a?`rgb${a.length===4?"a":""}(${a.map((i,s)=>s<3?parseInt(i,16):Math.round(parseInt(i,16)/255*1e3)/1e3).join(", ")})`:""}function wi(e){if(e.type)return e;if(e.charAt(0)==="#")return wi(PT(e));const t=e.indexOf("("),a=e.substring(0,t);if(["rgb","rgba","hsl","hsla","color"].indexOf(a)===-1)throw new Error(Ha(9,e));let i=e.substring(t+1,e.length-1),s;if(a==="color"){if(i=i.split(" "),s=i.shift(),i.length===4&&i[3].charAt(0)==="/"&&(i[3]=i[3].slice(1)),["srgb","display-p3","a98-rgb","prophoto-rgb","rec-2020"].indexOf(s)===-1)throw new Error(Ha(10,s))}else i=i.split(",");return i=i.map(r=>parseFloat(r)),{type:a,values:i,colorSpace:s}}function al(e){const{type:t,colorSpace:a}=e;let{values:i}=e;return t.indexOf("rgb")!==-1?i=i.map((s,r)=>r<3?parseInt(s,10):s):t.indexOf("hsl")!==-1&&(i[1]=`${i[1]}%`,i[2]=`${i[2]}%`),t.indexOf("color")!==-1?i=`${a} ${i.join(" ")}`:i=`${i.join(", ")}`,`${t}(${i})`}function AT(e){e=wi(e);const{values:t}=e,a=t[0],i=t[1]/100,s=t[2]/100,r=i*Math.min(s,1-s),n=(u,d=(u+a/30)%12)=>s-r*Math.max(Math.min(d-3,9-d,1),-1);let o="rgb";const l=[Math.round(n(0)*255),Math.round(n(8)*255),Math.round(n(4)*255)];return e.type==="hsla"&&(o+="a",l.push(t[3])),al({type:o,values:l})}function wf(e){e=wi(e);let t=e.type==="hsl"||e.type==="hsla"?wi(AT(e)).values:e.values;return t=t.map(a=>(e.type!=="color"&&(a/=255),a<=.03928?a/12.92:((a+.055)/1.055)**2.4)),Number((.2126*t[0]+.7152*t[1]+.0722*t[2]).toFixed(3))}function IT(e,t){const a=wf(e),i=wf(t);return(Math.max(a,i)+.05)/(Math.min(a,i)+.05)}function pi(e,t){return 
e=wi(e),t=Sc(t),(e.type==="rgb"||e.type==="hsl")&&(e.type+="a"),e.type==="color"?e.values[3]=`/${t}`:e.values[3]=t,al(e)}function RT(e,t){if(e=wi(e),t=Sc(t),e.type.indexOf("hsl")!==-1)e.values[2]*=1-t;else if(e.type.indexOf("rgb")!==-1||e.type.indexOf("color")!==-1)for(let a=0;a<3;a+=1)e.values[a]*=1-t;return al(e)}function ET(e,t){if(e=wi(e),t=Sc(t),e.type.indexOf("hsl")!==-1)e.values[2]+=(100-e.values[2])*t;else if(e.type.indexOf("rgb")!==-1)for(let a=0;a<3;a+=1)e.values[a]+=(255-e.values[a])*t;else if(e.type.indexOf("color")!==-1)for(let a=0;a<3;a+=1)e.values[a]+=(1-e.values[a])*t;return al(e)}const Ir="$$material";function jT(e,t){return D({toolbar:{minHeight:56,[e.up("xs")]:{"@media (orientation: landscape)":{minHeight:48}},[e.up("sm")]:{minHeight:64}}},t)}const FT={black:"#000",white:"#fff"},br=FT,NT={50:"#fafafa",100:"#f5f5f5",200:"#eeeeee",300:"#e0e0e0",400:"#bdbdbd",500:"#9e9e9e",600:"#757575",700:"#616161",800:"#424242",900:"#212121",A100:"#f5f5f5",A200:"#eeeeee",A400:"#bdbdbd",A700:"#616161"},LT=NT,MT={50:"#f3e5f5",100:"#e1bee7",200:"#ce93d8",300:"#ba68c8",400:"#ab47bc",500:"#9c27b0",600:"#8e24aa",700:"#7b1fa2",800:"#6a1b9a",900:"#4a148c",A100:"#ea80fc",A200:"#e040fb",A400:"#d500f9",A700:"#aa00ff"},Si=MT,OT={50:"#ffebee",100:"#ffcdd2",200:"#ef9a9a",300:"#e57373",400:"#ef5350",500:"#f44336",600:"#e53935",700:"#d32f2f",800:"#c62828",900:"#b71c1c",A100:"#ff8a80",A200:"#ff5252",A400:"#ff1744",A700:"#d50000"},Ci=OT,BT={50:"#fff3e0",100:"#ffe0b2",200:"#ffcc80",300:"#ffb74d",400:"#ffa726",500:"#ff9800",600:"#fb8c00",700:"#f57c00",800:"#ef6c00",900:"#e65100",A100:"#ffd180",A200:"#ffab40",A400:"#ff9100",A700:"#ff6d00"},Ps=BT,zT={50:"#e3f2fd",100:"#bbdefb",200:"#90caf9",300:"#64b5f6",400:"#42a5f5",500:"#2196f3",600:"#1e88e5",700:"#1976d2",800:"#1565c0",900:"#0d47a1",A100:"#82b1ff",A200:"#448aff",A400:"#2979ff",A700:"#2962ff"},Pi=zT,$T={50:"#e1f5fe",100:"#b3e5fc",200:"#81d4fa",300:"#4fc3f7",400:"#29b6f6",500:"#03a9f4",600:"#039be5",700:"#0288d1",800:"#0277bd",900:"#01579b",A100:"#80d8ff",A200:"#40c4ff",A400:"#00b0ff",A700:"#0091ea"},Ai=$T,WT={50:"#e8f5e9",100:"#c8e6c9",200:"#a5d6a7",300:"#81c784",400:"#66bb6a",500:"#4caf50",600:"#43a047",700:"#388e3c",800:"#2e7d32",900:"#1b5e20",A100:"#b9f6ca",A200:"#69f0ae",A400:"#00e676",A700:"#00c853"},Ii=WT,UT=["mode","contrastThreshold","tonalOffset"],Tf={text:{primary:"rgba(0, 0, 0, 0.87)",secondary:"rgba(0, 0, 0, 0.6)",disabled:"rgba(0, 0, 0, 0.38)"},divider:"rgba(0, 0, 0, 0.12)",background:{paper:br.white,default:br.white},action:{active:"rgba(0, 0, 0, 0.54)",hover:"rgba(0, 0, 0, 0.04)",hoverOpacity:.04,selected:"rgba(0, 0, 0, 0.08)",selectedOpacity:.08,disabled:"rgba(0, 0, 0, 0.26)",disabledBackground:"rgba(0, 0, 0, 0.12)",disabledOpacity:.38,focus:"rgba(0, 0, 0, 0.12)",focusOpacity:.12,activatedOpacity:.12}},Ml={text:{primary:br.white,secondary:"rgba(255, 255, 255, 0.7)",disabled:"rgba(255, 255, 255, 0.5)",icon:"rgba(255, 255, 255, 0.5)"},divider:"rgba(255, 255, 255, 0.12)",background:{paper:"#121212",default:"#121212"},action:{active:br.white,hover:"rgba(255, 255, 255, 0.08)",hoverOpacity:.08,selected:"rgba(255, 255, 255, 0.16)",selectedOpacity:.16,disabled:"rgba(255, 255, 255, 0.3)",disabledBackground:"rgba(255, 255, 255, 0.12)",disabledOpacity:.38,focus:"rgba(255, 255, 255, 0.12)",focusOpacity:.12,activatedOpacity:.24}};function xf(e,t,a,i){const s=i.light||i,r=i.dark||i*1.5;e[t]||(e.hasOwnProperty(a)?e[t]=e[a]:t==="light"?e.light=ET(e.main,s):t==="dark"&&(e.dark=RT(e.main,r)))}function HT(e="light"){return 
e==="dark"?{main:Pi[200],light:Pi[50],dark:Pi[400]}:{main:Pi[700],light:Pi[400],dark:Pi[800]}}function VT(e="light"){return e==="dark"?{main:Si[200],light:Si[50],dark:Si[400]}:{main:Si[500],light:Si[300],dark:Si[700]}}function KT(e="light"){return e==="dark"?{main:Ci[500],light:Ci[300],dark:Ci[700]}:{main:Ci[700],light:Ci[400],dark:Ci[800]}}function GT(e="light"){return e==="dark"?{main:Ai[400],light:Ai[300],dark:Ai[700]}:{main:Ai[700],light:Ai[500],dark:Ai[900]}}function XT(e="light"){return e==="dark"?{main:Ii[400],light:Ii[300],dark:Ii[700]}:{main:Ii[800],light:Ii[500],dark:Ii[900]}}function QT(e="light"){return e==="dark"?{main:Ps[400],light:Ps[300],dark:Ps[700]}:{main:"#ed6c02",light:Ps[500],dark:Ps[900]}}function JT(e){const{mode:t="light",contrastThreshold:a=3,tonalOffset:i=.2}=e,s=U(e,UT),r=e.primary||HT(t),n=e.secondary||VT(t),o=e.error||KT(t),l=e.info||GT(t),u=e.success||XT(t),d=e.warning||QT(t);function f(p){return IT(p,Ml.text.primary)>=a?Ml.text.primary:Tf.text.primary}const g=({color:p,name:q,mainShade:y=500,lightShade:h=300,darkShade:_=700})=>{if(p=D({},p),!p.main&&p[y]&&(p.main=p[y]),!p.hasOwnProperty("main"))throw new Error(Ha(11,q?` (${q})`:"",y));if(typeof p.main!="string")throw new Error(Ha(12,q?` (${q})`:"",JSON.stringify(p.main)));return xf(p,"light",h,i),xf(p,"dark",_,i),p.contrastText||(p.contrastText=f(p.main)),p},c={dark:Ml,light:Tf};return Rt(D({common:D({},br),mode:t,primary:g({color:r,name:"primary"}),secondary:g({color:n,name:"secondary",mainShade:"A400",lightShade:"A200",darkShade:"A700"}),error:g({color:o,name:"error"}),warning:g({color:d,name:"warning"}),info:g({color:l,name:"info"}),success:g({color:u,name:"success"}),grey:LT,contrastThreshold:a,getContrastText:f,augmentColor:g,tonalOffset:i},c[t]),s)}const YT=["fontFamily","fontSize","fontWeightLight","fontWeightRegular","fontWeightMedium","fontWeightBold","htmlFontSize","allVariants","pxToRem"];function ZT(e){return Math.round(e*1e5)/1e5}const qf={textTransform:"uppercase"},Df='"Roboto", "Helvetica", "Arial", sans-serif';function ex(e,t){const a=typeof t=="function"?t(e):t,{fontFamily:i=Df,fontSize:s=14,fontWeightLight:r=300,fontWeightRegular:n=400,fontWeightMedium:o=500,fontWeightBold:l=700,htmlFontSize:u=16,allVariants:d,pxToRem:f}=a,g=U(a,YT),c=s/14,m=f||(y=>`${y/u*c}rem`),p=(y,h,_,w,x)=>D({fontFamily:i,fontWeight:y,fontSize:m(h),lineHeight:_},i===Df?{letterSpacing:`${ZT(w/h)}em`}:{},x,d),q={h1:p(r,96,1.167,-1.5),h2:p(r,60,1.2,-.5),h3:p(n,48,1.167,0),h4:p(n,34,1.235,.25),h5:p(n,24,1.334,0),h6:p(o,20,1.6,.15),subtitle1:p(n,16,1.75,.15),subtitle2:p(o,14,1.57,.1),body1:p(n,16,1.5,.15),body2:p(n,14,1.43,.15),button:p(o,14,1.75,.4,qf),caption:p(n,12,1.66,.4),overline:p(n,12,2.66,1,qf),inherit:{fontFamily:"inherit",fontWeight:"inherit",fontSize:"inherit",lineHeight:"inherit",letterSpacing:"inherit"}};return Rt(D({htmlFontSize:u,pxToRem:m,fontFamily:i,fontSize:s,fontWeightLight:r,fontWeightRegular:n,fontWeightMedium:o,fontWeightBold:l},q),g,{clone:!1})}const tx=.2,ax=.14,ix=.12;function me(...e){return[`${e[0]}px ${e[1]}px ${e[2]}px ${e[3]}px rgba(0,0,0,${tx})`,`${e[4]}px ${e[5]}px ${e[6]}px ${e[7]}px rgba(0,0,0,${ax})`,`${e[8]}px ${e[9]}px ${e[10]}px ${e[11]}px rgba(0,0,0,${ix})`].join(",")}const 
sx=["none",me(0,2,1,-1,0,1,1,0,0,1,3,0),me(0,3,1,-2,0,2,2,0,0,1,5,0),me(0,3,3,-2,0,3,4,0,0,1,8,0),me(0,2,4,-1,0,4,5,0,0,1,10,0),me(0,3,5,-1,0,5,8,0,0,1,14,0),me(0,3,5,-1,0,6,10,0,0,1,18,0),me(0,4,5,-2,0,7,10,1,0,2,16,1),me(0,5,5,-3,0,8,10,1,0,3,14,2),me(0,5,6,-3,0,9,12,1,0,3,16,2),me(0,6,6,-3,0,10,14,1,0,4,18,3),me(0,6,7,-4,0,11,15,1,0,4,20,3),me(0,7,8,-4,0,12,17,2,0,5,22,4),me(0,7,8,-4,0,13,19,2,0,5,24,4),me(0,7,9,-4,0,14,21,2,0,5,26,4),me(0,8,9,-5,0,15,22,2,0,6,28,5),me(0,8,10,-5,0,16,24,2,0,6,30,5),me(0,8,11,-5,0,17,26,2,0,6,32,5),me(0,9,11,-5,0,18,28,2,0,7,34,6),me(0,9,12,-6,0,19,29,2,0,7,36,6),me(0,10,13,-6,0,20,31,3,0,8,38,7),me(0,10,13,-6,0,21,33,3,0,8,40,7),me(0,10,14,-6,0,22,35,3,0,8,42,7),me(0,11,14,-7,0,23,36,3,0,9,44,8),me(0,11,15,-7,0,24,38,3,0,9,46,8)],rx=sx,nx=["duration","easing","delay"],ox={easeInOut:"cubic-bezier(0.4, 0, 0.2, 1)",easeOut:"cubic-bezier(0.0, 0, 0.2, 1)",easeIn:"cubic-bezier(0.4, 0, 1, 1)",sharp:"cubic-bezier(0.4, 0, 0.6, 1)"},lx={shortest:150,shorter:200,short:250,standard:300,complex:375,enteringScreen:225,leavingScreen:195};function kf(e){return`${Math.round(e)}ms`}function ux(e){if(!e)return 0;const t=e/36;return Math.round((4+15*t**.25+t/5)*10)}function dx(e){const t=D({},ox,e.easing),a=D({},lx,e.duration);return D({getAutoHeightDuration:ux,create:(s=["all"],r={})=>{const{duration:n=a.standard,easing:o=t.easeInOut,delay:l=0}=r;return U(r,nx),(Array.isArray(s)?s:[s]).map(u=>`${u} ${typeof n=="string"?n:kf(n)} ${o} ${typeof l=="string"?l:kf(l)}`).join(",")}},e,{easing:t,duration:a})}const cx={mobileStepper:1e3,fab:1050,speedDial:1050,appBar:1100,drawer:1200,modal:1300,snackbar:1400,tooltip:1500},px=cx,fx=["breakpoints","mixins","spacing","palette","transitions","typography","shape"];function hy(e={},...t){const{mixins:a={},palette:i={},transitions:s={},typography:r={}}=e,n=U(e,fx);if(e.vars)throw new Error(Ha(18));const o=JT(i),l=kc(e);let u=Rt(l,{mixins:jT(l.breakpoints,a),palette:o,shadows:rx.slice(),typography:ex(o,r),transitions:dx(s),zIndex:D({},px)});return u=Rt(u,n),u=t.reduce((d,f)=>Rt(d,f),u),u.unstable_sxConfig=D({},Zo,n==null?void 0:n.unstable_sxConfig),u.unstable_sx=function(f){return el({sx:f,theme:this})},u}const mx=hy(),il=mx;function Rr(){const e=tl(il);return e[Ir]||e}function Ne({props:e,name:t}){return CT({props:e,name:t,defaultTheme:il,themeId:Ir})}const sa=e=>Qs(e)&&e!=="classes",hx=Qs,gx=kT({themeId:Ir,defaultTheme:il,rootShouldForwardProp:sa}),V=gx,yx=e=>{let t;return e<1?t=5.11916*e**2:t=4.5*Math.log(e+1)+2,(t/100).toFixed(2)},Sf=yx,_x=hy(),vx=hT({themeId:Ir,defaultTheme:_x,defaultClassName:"MuiBox-root",generateClassName:pc.generate}),bx=vx;function Er({props:e,states:t,muiFormControl:a}){return t.reduce((i,s)=>(i[s]=e[s],a&&typeof e[s]>"u"&&(i[s]=a[s]),i),{})}const wx=b.createContext(void 0),Cc=wx;function jr(){return b.useContext(Cc)}function Tx(e){return Pe("MuiFormLabel",e)}const xx=Te("MuiFormLabel",["root","colorSecondary","focused","disabled","error","filled","required","asterisk"]),Js=xx,qx=["children","className","color","component","disabled","error","filled","focused","required"],Dx=e=>{const{classes:t,color:a,focused:i,disabled:s,error:r,filled:n,required:o}=e,l={root:["root",`color${be(a)}`,s&&"disabled",r&&"error",n&&"filled",i&&"focused",o&&"required"],asterisk:["asterisk",r&&"error"]};return 
Fe(l,Tx,t)},kx=V("label",{name:"MuiFormLabel",slot:"Root",overridesResolver:({ownerState:e},t)=>D({},t.root,e.color==="secondary"&&t.colorSecondary,e.filled&&t.filled)})(({theme:e,ownerState:t})=>D({color:(e.vars||e).palette.text.secondary},e.typography.body1,{lineHeight:"1.4375em",padding:0,position:"relative",[`&.${Js.focused}`]:{color:(e.vars||e).palette[t.color].main},[`&.${Js.disabled}`]:{color:(e.vars||e).palette.text.disabled},[`&.${Js.error}`]:{color:(e.vars||e).palette.error.main}})),Sx=V("span",{name:"MuiFormLabel",slot:"Asterisk",overridesResolver:(e,t)=>t.asterisk})(({theme:e})=>({[`&.${Js.error}`]:{color:(e.vars||e).palette.error.main}})),Cx=b.forwardRef(function(t,a){const i=Ne({props:t,name:"MuiFormLabel"}),{children:s,className:r,component:n="label"}=i,o=U(i,qx),l=jr(),u=Er({props:i,muiFormControl:l,states:["color","required","focused","disabled","error","filled"]}),d=D({},i,{color:u.color||"primary",component:n,disabled:u.disabled,error:u.error,filled:u.filled,focused:u.focused,required:u.required}),f=Dx(d);return v.jsxs(kx,D({as:n,ownerState:d,className:Q(f.root,r),ref:a},o,{children:[s,u.required&&v.jsxs(Sx,{ownerState:d,"aria-hidden":!0,className:f.asterisk,children:[" ","*"]})]}))}),Px=Cx;function Ax(e){return Pe("MuiInputLabel",e)}Te("MuiInputLabel",["root","focused","disabled","error","required","asterisk","formControl","sizeSmall","shrink","animated","standard","filled","outlined"]);const Ix=["disableAnimation","margin","shrink","variant","className"],Rx=e=>{const{classes:t,formControl:a,size:i,shrink:s,disableAnimation:r,variant:n,required:o}=e,u=Fe({root:["root",a&&"formControl",!r&&"animated",s&&"shrink",i==="small"&&"sizeSmall",n],asterisk:[o&&"asterisk"]},Ax,t);return D({},t,u)},Ex=V(Px,{shouldForwardProp:e=>sa(e)||e==="classes",name:"MuiInputLabel",slot:"Root",overridesResolver:(e,t)=>{const{ownerState:a}=e;return[{[`& .${Js.asterisk}`]:t.asterisk},t.root,a.formControl&&t.formControl,a.size==="small"&&t.sizeSmall,a.shrink&&t.shrink,!a.disableAnimation&&t.animated,t[a.variant]]}})(({theme:e,ownerState:t})=>D({display:"block",transformOrigin:"top left",whiteSpace:"nowrap",overflow:"hidden",textOverflow:"ellipsis",maxWidth:"100%"},t.formControl&&{position:"absolute",left:0,top:0,transform:"translate(0, 20px) scale(1)"},t.size==="small"&&{transform:"translate(0, 17px) scale(1)"},t.shrink&&{transform:"translate(0, -1.5px) scale(0.75)",transformOrigin:"top left",maxWidth:"133%"},!t.disableAnimation&&{transition:e.transitions.create(["color","transform","max-width"],{duration:e.transitions.duration.shorter,easing:e.transitions.easing.easeOut})},t.variant==="filled"&&D({zIndex:1,pointerEvents:"none",transform:"translate(12px, 16px) scale(1)",maxWidth:"calc(100% - 24px)"},t.size==="small"&&{transform:"translate(12px, 13px) scale(1)"},t.shrink&&D({userSelect:"none",pointerEvents:"auto",transform:"translate(12px, 7px) scale(0.75)",maxWidth:"calc(133% - 24px)"},t.size==="small"&&{transform:"translate(12px, 4px) scale(0.75)"})),t.variant==="outlined"&&D({zIndex:1,pointerEvents:"none",transform:"translate(14px, 16px) scale(1)",maxWidth:"calc(100% - 24px)"},t.size==="small"&&{transform:"translate(14px, 9px) scale(1)"},t.shrink&&{userSelect:"none",pointerEvents:"auto",maxWidth:"calc(133% - 32px)",transform:"translate(14px, -9px) scale(0.75)"}))),jx=b.forwardRef(function(t,a){const i=Ne({name:"MuiInputLabel",props:t}),{disableAnimation:s=!1,shrink:r,className:n}=i,o=U(i,Ix),l=jr();let u=r;typeof u>"u"&&l&&(u=l.filled||l.focused||l.adornedStart);const 
d=Er({props:i,muiFormControl:l,states:["size","variant","required"]}),f=D({},i,{disableAnimation:s,formControl:l,shrink:u,size:d.size,variant:d.variant,required:d.required}),g=Rx(f);return v.jsx(Ex,D({"data-shrink":u,ownerState:f,ref:a,className:Q(g.root,n)},o,{classes:g}))}),Fx=jx,Nx=b.createContext({}),Yu=Nx;function Zu(e,t){return Zu=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(i,s){return i.__proto__=s,i},Zu(e,t)}function gy(e,t){e.prototype=Object.create(t.prototype),e.prototype.constructor=e,Zu(e,t)}const Cf={disabled:!1},Zn=Wt.createContext(null);var Lx=function(t){return t.scrollTop},Os="unmounted",ii="exited",si="entering",ji="entered",ed="exiting",ba=function(e){gy(t,e);function t(i,s){var r;r=e.call(this,i,s)||this;var n=s,o=n&&!n.isMounting?i.enter:i.appear,l;return r.appearStatus=null,i.in?o?(l=ii,r.appearStatus=si):l=ji:i.unmountOnExit||i.mountOnEnter?l=Os:l=ii,r.state={status:l},r.nextCallback=null,r}t.getDerivedStateFromProps=function(s,r){var n=s.in;return n&&r.status===Os?{status:ii}:null};var a=t.prototype;return a.componentDidMount=function(){this.updateStatus(!0,this.appearStatus)},a.componentDidUpdate=function(s){var r=null;if(s!==this.props){var n=this.state.status;this.props.in?n!==si&&n!==ji&&(r=si):(n===si||n===ji)&&(r=ed)}this.updateStatus(!1,r)},a.componentWillUnmount=function(){this.cancelNextCallback()},a.getTimeouts=function(){var s=this.props.timeout,r,n,o;return r=n=o=s,s!=null&&typeof s!="number"&&(r=s.exit,n=s.enter,o=s.appear!==void 0?s.appear:n),{exit:r,enter:n,appear:o}},a.updateStatus=function(s,r){if(s===void 0&&(s=!1),r!==null)if(this.cancelNextCallback(),r===si){if(this.props.unmountOnExit||this.props.mountOnEnter){var n=this.props.nodeRef?this.props.nodeRef.current:Yr.findDOMNode(this);n&&Lx(n)}this.performEnter(s)}else this.performExit();else this.props.unmountOnExit&&this.state.status===ii&&this.setState({status:Os})},a.performEnter=function(s){var r=this,n=this.props.enter,o=this.context?this.context.isMounting:s,l=this.props.nodeRef?[o]:[Yr.findDOMNode(this),o],u=l[0],d=l[1],f=this.getTimeouts(),g=o?f.appear:f.enter;if(!s&&!n||Cf.disabled){this.safeSetState({status:ji},function(){r.props.onEntered(u)});return}this.props.onEnter(u,d),this.safeSetState({status:si},function(){r.props.onEntering(u,d),r.onTransitionEnd(g,function(){r.safeSetState({status:ji},function(){r.props.onEntered(u,d)})})})},a.performExit=function(){var s=this,r=this.props.exit,n=this.getTimeouts(),o=this.props.nodeRef?void 0:Yr.findDOMNode(this);if(!r||Cf.disabled){this.safeSetState({status:ii},function(){s.props.onExited(o)});return}this.props.onExit(o),this.safeSetState({status:ed},function(){s.props.onExiting(o),s.onTransitionEnd(n.exit,function(){s.safeSetState({status:ii},function(){s.props.onExited(o)})})})},a.cancelNextCallback=function(){this.nextCallback!==null&&(this.nextCallback.cancel(),this.nextCallback=null)},a.safeSetState=function(s,r){r=this.setNextCallback(r),this.setState(s,r)},a.setNextCallback=function(s){var r=this,n=!0;return this.nextCallback=function(o){n&&(n=!1,r.nextCallback=null,s(o))},this.nextCallback.cancel=function(){n=!1},this.nextCallback},a.onTransitionEnd=function(s,r){this.setNextCallback(r);var n=this.props.nodeRef?this.props.nodeRef.current:Yr.findDOMNode(this),o=s==null&&!this.props.addEndListener;if(!n||o){setTimeout(this.nextCallback,0);return}if(this.props.addEndListener){var 
l=this.props.nodeRef?[this.nextCallback]:[n,this.nextCallback],u=l[0],d=l[1];this.props.addEndListener(u,d)}s!=null&&setTimeout(this.nextCallback,s)},a.render=function(){var s=this.state.status;if(s===Os)return null;var r=this.props,n=r.children;r.in,r.mountOnEnter,r.unmountOnExit,r.appear,r.enter,r.exit,r.timeout,r.addEndListener,r.onEnter,r.onEntering,r.onEntered,r.onExit,r.onExiting,r.onExited,r.nodeRef;var o=U(r,["children","in","mountOnEnter","unmountOnExit","appear","enter","exit","timeout","addEndListener","onEnter","onEntering","onEntered","onExit","onExiting","onExited","nodeRef"]);return Wt.createElement(Zn.Provider,{value:null},typeof n=="function"?n(s,o):Wt.cloneElement(Wt.Children.only(n),o))},t}(Wt.Component);ba.contextType=Zn;ba.propTypes={};function Ri(){}ba.defaultProps={in:!1,mountOnEnter:!1,unmountOnExit:!1,appear:!1,enter:!0,exit:!0,onEnter:Ri,onEntering:Ri,onEntered:Ri,onExit:Ri,onExiting:Ri,onExited:Ri};ba.UNMOUNTED=Os;ba.EXITED=ii;ba.ENTERING=si;ba.ENTERED=ji;ba.EXITING=ed;const Pc=ba;function Mx(e){if(e===void 0)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}function Ac(e,t){var a=function(r){return t&&b.isValidElement(r)?t(r):r},i=Object.create(null);return e&&b.Children.map(e,function(s){return s}).forEach(function(s){i[s.key]=a(s)}),i}function Ox(e,t){e=e||{},t=t||{};function a(d){return d in t?t[d]:e[d]}var i=Object.create(null),s=[];for(var r in e)r in t?s.length&&(i[r]=s,s=[]):s.push(r);var n,o={};for(var l in t){if(i[l])for(n=0;n{if(!o&&l!=null){const p=setTimeout(l,u);return()=>{clearTimeout(p)}}},[l,o,u]),v.jsx("span",{className:g,style:c,children:v.jsx("span",{className:m})})}const Vx=Te("MuiTouchRipple",["root","ripple","rippleVisible","ripplePulsate","child","childLeaving","childPulsate"]),St=Vx,Kx=["center","classes","className"];let sl=e=>e,Pf,Af,If,Rf;const td=550,Gx=80,Xx=wc(Pf||(Pf=sl` + 0% { + transform: scale(0); + opacity: 0.1; + } + + 100% { + transform: scale(1); + opacity: 0.3; + } +`)),Qx=wc(Af||(Af=sl` + 0% { + opacity: 1; + } + + 100% { + opacity: 0; + } +`)),Jx=wc(If||(If=sl` + 0% { + transform: scale(1); + } + + 50% { + transform: scale(0.92); + } + + 100% { + transform: scale(1); + } +`)),Yx=V("span",{name:"MuiTouchRipple",slot:"Root"})({overflow:"hidden",pointerEvents:"none",position:"absolute",zIndex:0,top:0,right:0,bottom:0,left:0,borderRadius:"inherit"}),Zx=V(Hx,{name:"MuiTouchRipple",slot:"Ripple"})(Rf||(Rf=sl` + opacity: 0; + position: absolute; + + &.${0} { + opacity: 0.3; + transform: scale(1); + animation-name: ${0}; + animation-duration: ${0}ms; + animation-timing-function: ${0}; + } + + &.${0} { + animation-duration: ${0}ms; + } + + & .${0} { + opacity: 1; + display: block; + width: 100%; + height: 100%; + border-radius: 50%; + background-color: currentColor; + } + + & .${0} { + opacity: 0; + animation-name: ${0}; + animation-duration: ${0}ms; + animation-timing-function: ${0}; + } + + & .${0} { + position: absolute; + /* @noflip */ + left: 0px; + top: 0; + animation-name: ${0}; + animation-duration: 2500ms; + animation-timing-function: ${0}; + animation-iteration-count: infinite; + animation-delay: 200ms; + } +`),St.rippleVisible,Xx,td,({theme:e})=>e.transitions.easing.easeInOut,St.ripplePulsate,({theme:e})=>e.transitions.duration.shorter,St.child,St.childLeaving,Qx,td,({theme:e})=>e.transitions.easing.easeInOut,St.childPulsate,Jx,({theme:e})=>e.transitions.easing.easeInOut),eq=b.forwardRef(function(t,a){const 
i=Ne({props:t,name:"MuiTouchRipple"}),{center:s=!1,classes:r={},className:n}=i,o=U(i,Kx),[l,u]=b.useState([]),d=b.useRef(0),f=b.useRef(null);b.useEffect(()=>{f.current&&(f.current(),f.current=null)},[l]);const g=b.useRef(!1),c=b.useRef(0),m=b.useRef(null),p=b.useRef(null);b.useEffect(()=>()=>{c.current&&clearTimeout(c.current)},[]);const q=b.useCallback(w=>{const{pulsate:x,rippleX:k,rippleY:T,rippleSize:S,cb:A}=w;u(P=>[...P,v.jsx(Zx,{classes:{ripple:Q(r.ripple,St.ripple),rippleVisible:Q(r.rippleVisible,St.rippleVisible),ripplePulsate:Q(r.ripplePulsate,St.ripplePulsate),child:Q(r.child,St.child),childLeaving:Q(r.childLeaving,St.childLeaving),childPulsate:Q(r.childPulsate,St.childPulsate)},timeout:td,pulsate:x,rippleX:k,rippleY:T,rippleSize:S},d.current)]),d.current+=1,f.current=A},[r]),y=b.useCallback((w={},x={},k=()=>{})=>{const{pulsate:T=!1,center:S=s||x.pulsate,fakeElement:A=!1}=x;if((w==null?void 0:w.type)==="mousedown"&&g.current){g.current=!1;return}(w==null?void 0:w.type)==="touchstart"&&(g.current=!0);const P=A?null:p.current,j=P?P.getBoundingClientRect():{width:0,height:0,left:0,top:0};let O,z,R;if(S||w===void 0||w.clientX===0&&w.clientY===0||!w.clientX&&!w.touches)O=Math.round(j.width/2),z=Math.round(j.height/2);else{const{clientX:F,clientY:M}=w.touches&&w.touches.length>0?w.touches[0]:w;O=Math.round(F-j.left),z=Math.round(M-j.top)}if(S)R=Math.sqrt((2*j.width**2+j.height**2)/3),R%2===0&&(R+=1);else{const F=Math.max(Math.abs((P?P.clientWidth:0)-O),O)*2+2,M=Math.max(Math.abs((P?P.clientHeight:0)-z),z)*2+2;R=Math.sqrt(F**2+M**2)}w!=null&&w.touches?m.current===null&&(m.current=()=>{q({pulsate:T,rippleX:O,rippleY:z,rippleSize:R,cb:k})},c.current=setTimeout(()=>{m.current&&(m.current(),m.current=null)},Gx)):q({pulsate:T,rippleX:O,rippleY:z,rippleSize:R,cb:k})},[s,q]),h=b.useCallback(()=>{y({},{pulsate:!0})},[y]),_=b.useCallback((w,x)=>{if(clearTimeout(c.current),(w==null?void 0:w.type)==="touchend"&&m.current){m.current(),m.current=null,c.current=setTimeout(()=>{_(w,x)});return}m.current=null,u(k=>k.length>0?k.slice(1):k),f.current=x},[]);return b.useImperativeHandle(a,()=>({pulsate:h,start:y,stop:_}),[h,y,_]),v.jsx(Yx,D({className:Q(St.root,r.root,n),ref:p},o,{children:v.jsx(Ux,{component:null,exit:!0,children:l})}))}),tq=eq;function aq(e){return Pe("MuiButtonBase",e)}const iq=Te("MuiButtonBase",["root","disabled","focusVisible"]),sq=iq,rq=["action","centerRipple","children","className","component","disabled","disableRipple","disableTouchRipple","focusRipple","focusVisibleClassName","LinkComponent","onBlur","onClick","onContextMenu","onDragLeave","onFocus","onFocusVisible","onKeyDown","onKeyUp","onMouseDown","onMouseLeave","onMouseUp","onTouchEnd","onTouchMove","onTouchStart","tabIndex","TouchRippleProps","touchRippleRef","type"],nq=e=>{const{disabled:t,focusVisible:a,focusVisibleClassName:i,classes:s}=e,n=Fe({root:["root",t&&"disabled",a&&"focusVisible"]},aq,s);return a&&i&&(n.root+=` ${i}`),n},oq=V("button",{name:"MuiButtonBase",slot:"Root",overridesResolver:(e,t)=>t.root})({display:"inline-flex",alignItems:"center",justifyContent:"center",position:"relative",boxSizing:"border-box",WebkitTapHighlightColor:"transparent",backgroundColor:"transparent",outline:0,border:0,margin:0,borderRadius:0,padding:0,cursor:"pointer",userSelect:"none",verticalAlign:"middle",MozAppearance:"none",WebkitAppearance:"none",textDecoration:"none",color:"inherit","&::-moz-focus-inner":{borderStyle:"none"},[`&.${sq.disabled}`]:{pointerEvents:"none",cursor:"default"},"@media 
print":{colorAdjust:"exact"}}),lq=b.forwardRef(function(t,a){const i=Ne({props:t,name:"MuiButtonBase"}),{action:s,centerRipple:r=!1,children:n,className:o,component:l="button",disabled:u=!1,disableRipple:d=!1,disableTouchRipple:f=!1,focusRipple:g=!1,LinkComponent:c="a",onBlur:m,onClick:p,onContextMenu:q,onDragLeave:y,onFocus:h,onFocusVisible:_,onKeyDown:w,onKeyUp:x,onMouseDown:k,onMouseLeave:T,onMouseUp:S,onTouchEnd:A,onTouchMove:P,onTouchStart:j,tabIndex:O=0,TouchRippleProps:z,touchRippleRef:R,type:F}=i,M=U(i,rq),N=b.useRef(null),C=b.useRef(null),I=Xe(C,R),{isFocusVisibleRef:L,onFocus:K,onBlur:X,ref:Ae}=$g(),[ue,xe]=b.useState(!1);u&&ue&&xe(!1),b.useImperativeHandle(s,()=>({focusVisible:()=>{xe(!0),N.current.focus()}}),[]);const[re,rt]=b.useState(!1);b.useEffect(()=>{rt(!0)},[]);const Ya=re&&!d&&!u;b.useEffect(()=>{ue&&g&&!d&&re&&C.current.pulsate()},[d,g,ue,re]);function Be(W,$,de=f){return ci(Ie=>($&&$(Ie),!de&&C.current&&C.current[W](Ie),!0))}const xt=Be("start",k),ne=Be("stop",q),G=Be("stop",y),ge=Be("stop",S),qe=Be("stop",W=>{ue&&W.preventDefault(),T&&T(W)}),qt=Be("start",j),Dt=Be("stop",A),kt=Be("stop",P),Nt=Be("stop",W=>{X(W),L.current===!1&&xe(!1),m&&m(W)},!1),wa=ci(W=>{N.current||(N.current=W.currentTarget),K(W),L.current===!0&&(xe(!0),_&&_(W)),h&&h(W)}),Ue=()=>{const W=N.current;return l&&l!=="button"&&!(W.tagName==="A"&&W.href)},Le=b.useRef(!1),Lt=ci(W=>{g&&!Le.current&&ue&&C.current&&W.key===" "&&(Le.current=!0,C.current.stop(W,()=>{C.current.start(W)})),W.target===W.currentTarget&&Ue()&&W.key===" "&&W.preventDefault(),w&&w(W),W.target===W.currentTarget&&Ue()&&W.key==="Enter"&&!u&&(W.preventDefault(),p&&p(W))}),ye=ci(W=>{g&&W.key===" "&&C.current&&ue&&!W.defaultPrevented&&(Le.current=!1,C.current.stop(W,()=>{C.current.pulsate(W)})),x&&x(W),p&&W.target===W.currentTarget&&Ue()&&W.key===" "&&!W.defaultPrevented&&p(W)});let ra=l;ra==="button"&&(M.href||M.to)&&(ra=c);const Mt={};ra==="button"?(Mt.type=F===void 0?"button":F,Mt.disabled=u):(!M.href&&!M.to&&(Mt.role="button"),u&&(Mt["aria-disabled"]=u));const na=Xe(a,Ae,N),Ta=D({},i,{centerRipple:r,component:l,disabled:u,disableRipple:d,disableTouchRipple:f,focusRipple:g,tabIndex:O,focusVisible:ue}),oe=nq(Ta);return v.jsxs(oq,D({as:ra,className:Q(oe.root,o),ownerState:Ta,onBlur:Nt,onClick:p,onContextMenu:ne,onFocus:wa,onKeyDown:Lt,onKeyUp:ye,onMouseDown:xt,onMouseLeave:qe,onMouseUp:ge,onDragLeave:G,onTouchEnd:Dt,onTouchMove:kt,onTouchStart:qt,ref:na,tabIndex:u?-1:O,type:F},Mt,M,{children:[n,Ya?v.jsx(tq,D({ref:I,center:r},z)):null]}))}),uq=lq;function dq(e){return Pe("MuiDivider",e)}const cq=Te("MuiDivider",["root","absolute","fullWidth","inset","middle","flexItem","light","vertical","withChildren","withChildrenVertical","textAlignRight","textAlignLeft","wrapper","wrapperVertical"]),Ef=cq,pq=["absolute","children","className","component","flexItem","light","orientation","role","textAlign","variant"],fq=e=>{const{absolute:t,children:a,classes:i,flexItem:s,light:r,orientation:n,textAlign:o,variant:l}=e;return 
Fe({root:["root",t&&"absolute",l,r&&"light",n==="vertical"&&"vertical",s&&"flexItem",a&&"withChildren",a&&n==="vertical"&&"withChildrenVertical",o==="right"&&n!=="vertical"&&"textAlignRight",o==="left"&&n!=="vertical"&&"textAlignLeft"],wrapper:["wrapper",n==="vertical"&&"wrapperVertical"]},dq,i)},mq=V("div",{name:"MuiDivider",slot:"Root",overridesResolver:(e,t)=>{const{ownerState:a}=e;return[t.root,a.absolute&&t.absolute,t[a.variant],a.light&&t.light,a.orientation==="vertical"&&t.vertical,a.flexItem&&t.flexItem,a.children&&t.withChildren,a.children&&a.orientation==="vertical"&&t.withChildrenVertical,a.textAlign==="right"&&a.orientation!=="vertical"&&t.textAlignRight,a.textAlign==="left"&&a.orientation!=="vertical"&&t.textAlignLeft]}})(({theme:e,ownerState:t})=>D({margin:0,flexShrink:0,borderWidth:0,borderStyle:"solid",borderColor:(e.vars||e).palette.divider,borderBottomWidth:"thin"},t.absolute&&{position:"absolute",bottom:0,left:0,width:"100%"},t.light&&{borderColor:e.vars?`rgba(${e.vars.palette.dividerChannel} / 0.08)`:pi(e.palette.divider,.08)},t.variant==="inset"&&{marginLeft:72},t.variant==="middle"&&t.orientation==="horizontal"&&{marginLeft:e.spacing(2),marginRight:e.spacing(2)},t.variant==="middle"&&t.orientation==="vertical"&&{marginTop:e.spacing(1),marginBottom:e.spacing(1)},t.orientation==="vertical"&&{height:"100%",borderBottomWidth:0,borderRightWidth:"thin"},t.flexItem&&{alignSelf:"stretch",height:"auto"}),({ownerState:e})=>D({},e.children&&{display:"flex",whiteSpace:"nowrap",textAlign:"center",border:0,"&::before, &::after":{content:'""',alignSelf:"center"}}),({theme:e,ownerState:t})=>D({},t.children&&t.orientation!=="vertical"&&{"&::before, &::after":{width:"100%",borderTop:`thin solid ${(e.vars||e).palette.divider}`}}),({theme:e,ownerState:t})=>D({},t.children&&t.orientation==="vertical"&&{flexDirection:"column","&::before, &::after":{height:"100%",borderLeft:`thin solid ${(e.vars||e).palette.divider}`}}),({ownerState:e})=>D({},e.textAlign==="right"&&e.orientation!=="vertical"&&{"&::before":{width:"90%"},"&::after":{width:"10%"}},e.textAlign==="left"&&e.orientation!=="vertical"&&{"&::before":{width:"10%"},"&::after":{width:"90%"}})),hq=V("span",{name:"MuiDivider",slot:"Wrapper",overridesResolver:(e,t)=>{const{ownerState:a}=e;return[t.wrapper,a.orientation==="vertical"&&t.wrapperVertical]}})(({theme:e,ownerState:t})=>D({display:"inline-block",paddingLeft:`calc(${e.spacing(1)} * 1.2)`,paddingRight:`calc(${e.spacing(1)} * 1.2)`},t.orientation==="vertical"&&{paddingTop:`calc(${e.spacing(1)} * 1.2)`,paddingBottom:`calc(${e.spacing(1)} * 1.2)`})),yy=b.forwardRef(function(t,a){const i=Ne({props:t,name:"MuiDivider"}),{absolute:s=!1,children:r,className:n,component:o=r?"div":"hr",flexItem:l=!1,light:u=!1,orientation:d="horizontal",role:f=o!=="hr"?"separator":void 0,textAlign:g="center",variant:c="fullWidth"}=i,m=U(i,pq),p=D({},i,{absolute:s,component:o,flexItem:l,light:u,orientation:d,role:f,textAlign:g,variant:c}),q=fq(p);return v.jsx(mq,D({as:o,className:Q(q.root,n),role:f,ref:a,ownerState:p},m,{children:r?v.jsx(hq,{className:q.wrapper,ownerState:p,children:r}):null}))});yy.muiSkipListHighlight=!0;const jf=yy,gq=Te("MuiListItemIcon",["root","alignItemsFlexStart"]),Ff=gq,yq=Te("MuiListItemText",["root","multiline","dense","inset","primary","secondary"]),Nf=yq;function _q(e){return Pe("MuiMenuItem",e)}const 
vq=Te("MuiMenuItem",["root","focusVisible","dense","disabled","divider","gutters","selected"]),As=vq,bq=["autoFocus","component","dense","divider","disableGutters","focusVisibleClassName","role","tabIndex","className"],wq=(e,t)=>{const{ownerState:a}=e;return[t.root,a.dense&&t.dense,a.divider&&t.divider,!a.disableGutters&&t.gutters]},Tq=e=>{const{disabled:t,dense:a,divider:i,disableGutters:s,selected:r,classes:n}=e,l=Fe({root:["root",a&&"dense",t&&"disabled",!s&&"gutters",i&&"divider",r&&"selected"]},_q,n);return D({},n,l)},xq=V(uq,{shouldForwardProp:e=>sa(e)||e==="classes",name:"MuiMenuItem",slot:"Root",overridesResolver:wq})(({theme:e,ownerState:t})=>D({},e.typography.body1,{display:"flex",justifyContent:"flex-start",alignItems:"center",position:"relative",textDecoration:"none",minHeight:48,paddingTop:6,paddingBottom:6,boxSizing:"border-box",whiteSpace:"nowrap"},!t.disableGutters&&{paddingLeft:16,paddingRight:16},t.divider&&{borderBottom:`1px solid ${(e.vars||e).palette.divider}`,backgroundClip:"padding-box"},{"&:hover":{textDecoration:"none",backgroundColor:(e.vars||e).palette.action.hover,"@media (hover: none)":{backgroundColor:"transparent"}},[`&.${As.selected}`]:{backgroundColor:e.vars?`rgba(${e.vars.palette.primary.mainChannel} / ${e.vars.palette.action.selectedOpacity})`:pi(e.palette.primary.main,e.palette.action.selectedOpacity),[`&.${As.focusVisible}`]:{backgroundColor:e.vars?`rgba(${e.vars.palette.primary.mainChannel} / calc(${e.vars.palette.action.selectedOpacity} + ${e.vars.palette.action.focusOpacity}))`:pi(e.palette.primary.main,e.palette.action.selectedOpacity+e.palette.action.focusOpacity)}},[`&.${As.selected}:hover`]:{backgroundColor:e.vars?`rgba(${e.vars.palette.primary.mainChannel} / calc(${e.vars.palette.action.selectedOpacity} + ${e.vars.palette.action.hoverOpacity}))`:pi(e.palette.primary.main,e.palette.action.selectedOpacity+e.palette.action.hoverOpacity),"@media (hover: none)":{backgroundColor:e.vars?`rgba(${e.vars.palette.primary.mainChannel} / ${e.vars.palette.action.selectedOpacity})`:pi(e.palette.primary.main,e.palette.action.selectedOpacity)}},[`&.${As.focusVisible}`]:{backgroundColor:(e.vars||e).palette.action.focus},[`&.${As.disabled}`]:{opacity:(e.vars||e).palette.action.disabledOpacity},[`& + .${Ef.root}`]:{marginTop:e.spacing(1),marginBottom:e.spacing(1)},[`& + .${Ef.inset}`]:{marginLeft:52},[`& .${Nf.root}`]:{marginTop:0,marginBottom:0},[`& .${Nf.inset}`]:{paddingLeft:36},[`& .${Ff.root}`]:{minWidth:36}},!t.dense&&{[e.breakpoints.up("sm")]:{minHeight:"auto"}},t.dense&&D({minHeight:32,paddingTop:4,paddingBottom:4},e.typography.body2,{[`& .${Ff.root} svg`]:{fontSize:"1.25rem"}}))),qq=b.forwardRef(function(t,a){const i=Ne({props:t,name:"MuiMenuItem"}),{autoFocus:s=!1,component:r="li",dense:n=!1,divider:o=!1,disableGutters:l=!1,focusVisibleClassName:u,role:d="menuitem",tabIndex:f,className:g}=i,c=U(i,bq),m=b.useContext(Yu),p=b.useMemo(()=>({dense:n||m.dense||!1,disableGutters:l}),[m.dense,n,l]),q=b.useRef(null);Va(()=>{s&&q.current&&q.current.focus()},[s]);const y=D({},i,{dense:p.dense,divider:o,disableGutters:l}),h=Tq(i),_=Xe(q,a);let w;return i.disabled||(w=f!==void 0?f:-1),v.jsx(Yu.Provider,{value:p,children:v.jsx(xq,D({ref:_,role:d,tabIndex:w,component:r,focusVisibleClassName:Q(h.focusVisible,u),className:Q(h.root,g)},c,{ownerState:y,classes:h}))})}),Lf=qq;function Mf(e){return e!=null&&!(Array.isArray(e)&&e.length===0)}function eo(e,t=!1){return e&&(Mf(e.value)&&e.value!==""||t&&Mf(e.defaultValue)&&e.defaultValue!=="")}function Dq(e){return 
e.startAdornment}function kq(e){return Pe("MuiFormControl",e)}Te("MuiFormControl",["root","marginNone","marginNormal","marginDense","fullWidth","disabled"]);const Sq=["children","className","color","component","disabled","error","focused","fullWidth","hiddenLabel","margin","required","size","variant"],Cq=e=>{const{classes:t,margin:a,fullWidth:i}=e,s={root:["root",a!=="none"&&`margin${be(a)}`,i&&"fullWidth"]};return Fe(s,kq,t)},Pq=V("div",{name:"MuiFormControl",slot:"Root",overridesResolver:({ownerState:e},t)=>D({},t.root,t[`margin${be(e.margin)}`],e.fullWidth&&t.fullWidth)})(({ownerState:e})=>D({display:"inline-flex",flexDirection:"column",position:"relative",minWidth:0,padding:0,margin:0,border:0,verticalAlign:"top"},e.margin==="normal"&&{marginTop:16,marginBottom:8},e.margin==="dense"&&{marginTop:8,marginBottom:4},e.fullWidth&&{width:"100%"})),Aq=b.forwardRef(function(t,a){const i=Ne({props:t,name:"MuiFormControl"}),{children:s,className:r,color:n="primary",component:o="div",disabled:l=!1,error:u=!1,focused:d,fullWidth:f=!1,hiddenLabel:g=!1,margin:c="none",required:m=!1,size:p="medium",variant:q="outlined"}=i,y=U(i,Sq),h=D({},i,{color:n,component:o,disabled:l,error:u,fullWidth:f,hiddenLabel:g,margin:c,required:m,size:p,variant:q}),_=Cq(h),[w,x]=b.useState(()=>{let z=!1;return s&&b.Children.forEach(s,R=>{if(!gn(R,["Input","Select"]))return;const F=gn(R,["Select"])?R.props.input:R;F&&Dq(F.props)&&(z=!0)}),z}),[k,T]=b.useState(()=>{let z=!1;return s&&b.Children.forEach(s,R=>{gn(R,["Input","Select"])&&(eo(R.props,!0)||eo(R.props.inputProps,!0))&&(z=!0)}),z}),[S,A]=b.useState(!1);l&&S&&A(!1);const P=d!==void 0&&!l?d:S;let j;const O=b.useMemo(()=>({adornedStart:w,setAdornedStart:x,color:n,disabled:l,error:u,filled:k,focused:P,fullWidth:f,hiddenLabel:g,size:p,onBlur:()=>{A(!1)},onEmpty:()=>{T(!1)},onFilled:()=>{T(!0)},onFocus:()=>{A(!0)},registerEffect:j,required:m,variant:q}),[w,n,l,u,k,P,f,g,j,m,p,q]);return v.jsx(Cc.Provider,{value:O,children:v.jsx(Pq,D({as:o,ownerState:h,className:Q(_.root,r),ref:a},y,{children:s}))})}),Iq=Aq;function wr(e){return typeof e=="string"}function Rq(e,t,a){return e===void 0||wr(e)?t:D({},t,{ownerState:D({},t.ownerState,a)})}const Eq={disableDefaultClasses:!1},jq=b.createContext(Eq);function Fq(e){const{disableDefaultClasses:t}=b.useContext(jq);return a=>t?"":e(a)}function _y(e,t=[]){if(e===void 0)return{};const a={};return Object.keys(e).filter(i=>i.match(/^on[A-Z]/)&&typeof e[i]=="function"&&!t.includes(i)).forEach(i=>{a[i]=e[i]}),a}function ad(e,t,a){return typeof e=="function"?e(t,a):e}function Of(e){if(e===void 0)return{};const t={};return Object.keys(e).filter(a=>!(a.match(/^on[A-Z]/)&&typeof e[a]=="function")).forEach(a=>{t[a]=e[a]}),t}function Nq(e){const{getSlotProps:t,additionalProps:a,externalSlotProps:i,externalForwardedProps:s,className:r}=e;if(!t){const c=Q(s==null?void 0:s.className,i==null?void 0:i.className,r,a==null?void 0:a.className),m=D({},a==null?void 0:a.style,s==null?void 0:s.style,i==null?void 0:i.style),p=D({},a,s,i);return c.length>0&&(p.className=c),Object.keys(m).length>0&&(p.style=m),{props:p,internalRef:void 0}}const n=_y(D({},s,i)),o=Of(i),l=Of(s),u=t(n),d=Q(u==null?void 0:u.className,a==null?void 0:a.className,r,s==null?void 0:s.className,i==null?void 0:i.className),f=D({},u==null?void 0:u.style,a==null?void 0:a.style,s==null?void 0:s.style,i==null?void 0:i.style),g=D({},u,a,l,o);return d.length>0&&(g.className=d),Object.keys(f).length>0&&(g.style=f),{props:g,internalRef:u.ref}}const 
Lq=["elementType","externalSlotProps","ownerState","skipResolvingSlotProps"];function us(e){var t;const{elementType:a,externalSlotProps:i,ownerState:s,skipResolvingSlotProps:r=!1}=e,n=U(e,Lq),o=r?{}:ad(i,s),{props:l,internalRef:u}=Nq(D({},n,{externalSlotProps:o})),d=Xe(u,o==null?void 0:o.ref,(t=e.additionalProps)==null?void 0:t.ref);return Rq(a,D({},l,{ref:d}),s)}function Mq(e){return Pe("MuiList",e)}Te("MuiList",["root","padding","dense","subheader"]);const Oq=["children","className","component","dense","disablePadding","subheader"],Bq=e=>{const{classes:t,disablePadding:a,dense:i,subheader:s}=e;return Fe({root:["root",!a&&"padding",i&&"dense",s&&"subheader"]},Mq,t)},zq=V("ul",{name:"MuiList",slot:"Root",overridesResolver:(e,t)=>{const{ownerState:a}=e;return[t.root,!a.disablePadding&&t.padding,a.dense&&t.dense,a.subheader&&t.subheader]}})(({ownerState:e})=>D({listStyle:"none",margin:0,padding:0,position:"relative"},!e.disablePadding&&{paddingTop:8,paddingBottom:8},e.subheader&&{paddingTop:0})),$q=b.forwardRef(function(t,a){const i=Ne({props:t,name:"MuiList"}),{children:s,className:r,component:n="ul",dense:o=!1,disablePadding:l=!1,subheader:u}=i,d=U(i,Oq),f=b.useMemo(()=>({dense:o}),[o]),g=D({},i,{component:n,dense:o,disablePadding:l}),c=Bq(g);return v.jsx(Yu.Provider,{value:f,children:v.jsxs(zq,D({as:n,className:Q(c.root,r),ref:a,ownerState:g},d,{children:[u,s]}))})}),Wq=$q,Uq=["actions","autoFocus","autoFocusItem","children","className","disabledItemsFocusable","disableListWrap","onKeyDown","variant"];function Ol(e,t,a){return e===t?e.firstChild:t&&t.nextElementSibling?t.nextElementSibling:a?null:e.firstChild}function Bf(e,t,a){return e===t?a?e.firstChild:e.lastChild:t&&t.previousElementSibling?t.previousElementSibling:a?null:e.lastChild}function vy(e,t){if(t===void 0)return!0;let a=e.innerText;return a===void 0&&(a=e.textContent),a=a.trim().toLowerCase(),a.length===0?!1:t.repeating?a[0]===t.keys[0]:a.indexOf(t.keys.join(""))===0}function Is(e,t,a,i,s,r){let n=!1,o=s(e,t,t?a:!1);for(;o;){if(o===e.firstChild){if(n)return!1;n=!0}const l=i?!1:o.disabled||o.getAttribute("aria-disabled")==="true";if(!o.hasAttribute("tabindex")||!vy(o,r)||l)o=s(e,o,a);else return o.focus(),!0}return!1}const Hq=b.forwardRef(function(t,a){const{actions:i,autoFocus:s=!1,autoFocusItem:r=!1,children:n,className:o,disabledItemsFocusable:l=!1,disableListWrap:u=!1,onKeyDown:d,variant:f="selectedMenu"}=t,g=U(t,Uq),c=b.useRef(null),m=b.useRef({keys:[],repeating:!0,previousKeyMatched:!0,lastTime:null});Va(()=>{s&&c.current.focus()},[s]),b.useImperativeHandle(i,()=>({adjustStyleForScrollbar:(_,w)=>{const x=!c.current.style.width;if(_.clientHeight{const w=c.current,x=_.key,k=ct(w).activeElement;if(x==="ArrowDown")_.preventDefault(),Is(w,k,u,l,Ol);else if(x==="ArrowUp")_.preventDefault(),Is(w,k,u,l,Bf);else if(x==="Home")_.preventDefault(),Is(w,null,u,l,Ol);else if(x==="End")_.preventDefault(),Is(w,null,u,l,Bf);else if(x.length===1){const T=m.current,S=x.toLowerCase(),A=performance.now();T.keys.length>0&&(A-T.lastTime>500?(T.keys=[],T.repeating=!0,T.previousKeyMatched=!0):T.repeating&&S!==T.keys[0]&&(T.repeating=!1)),T.lastTime=A,T.keys.push(S);const P=k&&!T.repeating&&vy(k,T);T.previousKeyMatched&&(P||Is(w,k,!1,l,Ol,T))?_.preventDefault():T.previousKeyMatched=!1}d&&d(_)},q=Xe(c,a);let 
y=-1;b.Children.forEach(n,(_,w)=>{if(!b.isValidElement(_)){y===w&&(y+=1,y>=n.length&&(y=-1));return}_.props.disabled||(f==="selectedMenu"&&_.props.selected||y===-1)&&(y=w),y===w&&(_.props.disabled||_.props.muiSkipListHighlight||_.type.muiSkipListHighlight)&&(y+=1,y>=n.length&&(y=-1))});const h=b.Children.map(n,(_,w)=>{if(w===y){const x={};return r&&(x.autoFocus=!0),_.props.tabIndex===void 0&&f==="selectedMenu"&&(x.tabIndex=0),b.cloneElement(_,x)}return _});return v.jsx(Wq,D({role:"menu",ref:q,className:o,onKeyDown:p,tabIndex:s?0:-1},g,{children:h}))}),Vq=Hq,Kq=["input","select","textarea","a[href]","button","[tabindex]","audio[controls]","video[controls]",'[contenteditable]:not([contenteditable="false"])'].join(",");function Gq(e){const t=parseInt(e.getAttribute("tabindex")||"",10);return Number.isNaN(t)?e.contentEditable==="true"||(e.nodeName==="AUDIO"||e.nodeName==="VIDEO"||e.nodeName==="DETAILS")&&e.getAttribute("tabindex")===null?0:e.tabIndex:t}function Xq(e){if(e.tagName!=="INPUT"||e.type!=="radio"||!e.name)return!1;const t=i=>e.ownerDocument.querySelector(`input[type="radio"]${i}`);let a=t(`[name="${e.name}"]:checked`);return a||(a=t(`[name="${e.name}"]`)),a!==e}function Qq(e){return!(e.disabled||e.tagName==="INPUT"&&e.type==="hidden"||Xq(e))}function Jq(e){const t=[],a=[];return Array.from(e.querySelectorAll(Kq)).forEach((i,s)=>{const r=Gq(i);r===-1||!Qq(i)||(r===0?t.push(i):a.push({documentOrder:s,tabIndex:r,node:i}))}),a.sort((i,s)=>i.tabIndex===s.tabIndex?i.documentOrder-s.documentOrder:i.tabIndex-s.tabIndex).map(i=>i.node).concat(t)}function Yq(){return!0}function Zq(e){const{children:t,disableAutoFocus:a=!1,disableEnforceFocus:i=!1,disableRestoreFocus:s=!1,getTabbable:r=Jq,isEnabled:n=Yq,open:o}=e,l=b.useRef(!1),u=b.useRef(null),d=b.useRef(null),f=b.useRef(null),g=b.useRef(null),c=b.useRef(!1),m=b.useRef(null),p=Xe(t.ref,m),q=b.useRef(null);b.useEffect(()=>{!o||!m.current||(c.current=!a)},[a,o]),b.useEffect(()=>{if(!o||!m.current)return;const _=ct(m.current);return m.current.contains(_.activeElement)||(m.current.hasAttribute("tabIndex")||m.current.setAttribute("tabIndex","-1"),c.current&&m.current.focus()),()=>{s||(f.current&&f.current.focus&&(l.current=!0,f.current.focus()),f.current=null)}},[o]),b.useEffect(()=>{if(!o||!m.current)return;const _=ct(m.current),w=T=>{const{current:S}=m;if(S!==null){if(!_.hasFocus()||i||!n()||l.current){l.current=!1;return}if(!S.contains(_.activeElement)){if(T&&g.current!==T.target||_.activeElement!==g.current)g.current=null;else if(g.current!==null)return;if(!c.current)return;let j=[];if((_.activeElement===u.current||_.activeElement===d.current)&&(j=r(m.current)),j.length>0){var A,P;const O=!!((A=q.current)!=null&&A.shiftKey&&((P=q.current)==null?void 0:P.key)==="Tab"),z=j[0],R=j[j.length-1];typeof z!="string"&&typeof R!="string"&&(O?R.focus():z.focus())}else S.focus()}}},x=T=>{q.current=T,!(i||!n()||T.key!=="Tab")&&_.activeElement===m.current&&T.shiftKey&&(l.current=!0,d.current&&d.current.focus())};_.addEventListener("focusin",w),_.addEventListener("keydown",x,!0);const k=setInterval(()=>{_.activeElement&&_.activeElement.tagName==="BODY"&&w(null)},50);return()=>{clearInterval(k),_.removeEventListener("focusin",w),_.removeEventListener("keydown",x,!0)}},[a,i,s,n,o,r]);const y=_=>{f.current===null&&(f.current=_.relatedTarget),c.current=!0,g.current=_.target;const w=t.props.onFocus;w&&w(_)},h=_=>{f.current===null&&(f.current=_.relatedTarget),c.current=!0};return 
v.jsxs(b.Fragment,{children:[v.jsx("div",{tabIndex:o?0:-1,onFocus:h,ref:u,"data-testid":"sentinelStart"}),b.cloneElement(t,{ref:p,onFocus:y}),v.jsx("div",{tabIndex:o?0:-1,onFocus:h,ref:d,"data-testid":"sentinelEnd"})]})}function eD(e){return typeof e=="function"?e():e}const tD=b.forwardRef(function(t,a){const{children:i,container:s,disablePortal:r=!1}=t,[n,o]=b.useState(null),l=Xe(b.isValidElement(i)?i.ref:null,a);if(Va(()=>{r||o(eD(s)||document.body)},[s,r]),Va(()=>{if(n&&!r)return Qn(a,n),()=>{Qn(a,null)}},[a,n,r]),r){if(b.isValidElement(i)){const u={ref:l};return b.cloneElement(i,u)}return v.jsx(b.Fragment,{children:i})}return v.jsx(b.Fragment,{children:n&&vo.createPortal(i,n)})});function aD(e){const t=ct(e);return t.body===e?ia(e).innerWidth>t.documentElement.clientWidth:e.scrollHeight>e.clientHeight}function Ys(e,t){t?e.setAttribute("aria-hidden","true"):e.removeAttribute("aria-hidden")}function zf(e){return parseInt(ia(e).getComputedStyle(e).paddingRight,10)||0}function iD(e){const a=["TEMPLATE","SCRIPT","STYLE","LINK","MAP","META","NOSCRIPT","PICTURE","COL","COLGROUP","PARAM","SLOT","SOURCE","TRACK"].indexOf(e.tagName)!==-1,i=e.tagName==="INPUT"&&e.getAttribute("type")==="hidden";return a||i}function $f(e,t,a,i,s){const r=[t,a,...i];[].forEach.call(e.children,n=>{const o=r.indexOf(n)===-1,l=!iD(n);o&&l&&Ys(n,s)})}function Bl(e,t){let a=-1;return e.some((i,s)=>t(i)?(a=s,!0):!1),a}function sD(e,t){const a=[],i=e.container;if(!t.disableScrollLock){if(aD(i)){const n=Wg(ct(i));a.push({value:i.style.paddingRight,property:"padding-right",el:i}),i.style.paddingRight=`${zf(i)+n}px`;const o=ct(i).querySelectorAll(".mui-fixed");[].forEach.call(o,l=>{a.push({value:l.style.paddingRight,property:"padding-right",el:l}),l.style.paddingRight=`${zf(l)+n}px`})}let r;if(i.parentNode instanceof DocumentFragment)r=ct(i).body;else{const n=i.parentElement,o=ia(i);r=(n==null?void 0:n.nodeName)==="HTML"&&o.getComputedStyle(n).overflowY==="scroll"?n:i}a.push({value:r.style.overflow,property:"overflow",el:r},{value:r.style.overflowX,property:"overflow-x",el:r},{value:r.style.overflowY,property:"overflow-y",el:r}),r.style.overflow="hidden"}return()=>{a.forEach(({value:r,el:n,property:o})=>{r?n.style.setProperty(o,r):n.style.removeProperty(o)})}}function rD(e){const t=[];return[].forEach.call(e.children,a=>{a.getAttribute("aria-hidden")==="true"&&t.push(a)}),t}class nD{constructor(){this.containers=void 0,this.modals=void 0,this.modals=[],this.containers=[]}add(t,a){let i=this.modals.indexOf(t);if(i!==-1)return i;i=this.modals.length,this.modals.push(t),t.modalRef&&Ys(t.modalRef,!1);const s=rD(a);$f(a,t.mount,t.modalRef,s,!0);const r=Bl(this.containers,n=>n.container===a);return r!==-1?(this.containers[r].modals.push(t),i):(this.containers.push({modals:[t],container:a,restore:null,hiddenSiblings:s}),i)}mount(t,a){const i=Bl(this.containers,r=>r.modals.indexOf(t)!==-1),s=this.containers[i];s.restore||(s.restore=sD(s,a))}remove(t,a=!0){const i=this.modals.indexOf(t);if(i===-1)return i;const s=Bl(this.containers,n=>n.modals.indexOf(t)!==-1),r=this.containers[s];if(r.modals.splice(r.modals.indexOf(t),1),this.modals.splice(i,1),r.modals.length===0)r.restore&&r.restore(),t.modalRef&&Ys(t.modalRef,a),$f(r.container,t.mount,t.modalRef,r.hiddenSiblings,!1),this.containers.splice(s,1);else{const n=r.modals[r.modals.length-1];n.modalRef&&Ys(n.modalRef,!1)}return i}isTopModal(t){return this.modals.length>0&&this.modals[this.modals.length-1]===t}}function oD(e){return typeof e=="function"?e():e}function lD(e){return 
e?e.props.hasOwnProperty("in"):!1}const uD=new nD;function dD(e){const{container:t,disableEscapeKeyDown:a=!1,disableScrollLock:i=!1,manager:s=uD,closeAfterTransition:r=!1,onTransitionEnter:n,onTransitionExited:o,children:l,onClose:u,open:d,rootRef:f}=e,g=b.useRef({}),c=b.useRef(null),m=b.useRef(null),p=Xe(m,f),[q,y]=b.useState(!d),h=lD(l);let _=!0;(e["aria-hidden"]==="false"||e["aria-hidden"]===!1)&&(_=!1);const w=()=>ct(c.current),x=()=>(g.current.modalRef=m.current,g.current.mount=c.current,g.current),k=()=>{s.mount(x(),{disableScrollLock:i}),m.current&&(m.current.scrollTop=0)},T=ci(()=>{const M=oD(t)||w().body;s.add(x(),M),m.current&&k()}),S=b.useCallback(()=>s.isTopModal(x()),[s]),A=ci(M=>{c.current=M,M&&(d&&S()?k():m.current&&Ys(m.current,_))}),P=b.useCallback(()=>{s.remove(x(),_)},[_,s]);b.useEffect(()=>()=>{P()},[P]),b.useEffect(()=>{d?T():(!h||!r)&&P()},[d,P,h,r,T]);const j=M=>N=>{var C;(C=M.onKeyDown)==null||C.call(M,N),!(N.key!=="Escape"||!S())&&(a||(N.stopPropagation(),u&&u(N,"escapeKeyDown")))},O=M=>N=>{var C;(C=M.onClick)==null||C.call(M,N),N.target===N.currentTarget&&u&&u(N,"backdropClick")};return{getRootProps:(M={})=>{const N=_y(e);delete N.onTransitionEnter,delete N.onTransitionExited;const C=D({},N,M);return D({role:"presentation"},C,{onKeyDown:j(C),ref:p})},getBackdropProps:(M={})=>{const N=M;return D({"aria-hidden":!0},N,{onClick:O(N),open:d})},getTransitionProps:()=>{const M=()=>{y(!1),n&&n()},N=()=>{y(!0),o&&o(),r&&P()};return{onEnter:Vu(M,l.props.onEnter),onExited:Vu(N,l.props.onExited)}},rootRef:p,portalRef:A,isTopModal:S,exited:q,hasTransition:h}}function cD(e){return Pe("MuiModal",e)}Te("MuiModal",["root","hidden","backdrop"]);const pD=["children","closeAfterTransition","container","disableAutoFocus","disableEnforceFocus","disableEscapeKeyDown","disablePortal","disableRestoreFocus","disableScrollLock","hideBackdrop","keepMounted","onBackdropClick","onClose","onKeyDown","open","onTransitionEnter","onTransitionExited","slotProps","slots"],fD=e=>{const{open:t,exited:a}=e;return Fe({root:["root",!t&&a&&"hidden"],backdrop:["backdrop"]},Fq(cD))},mD=b.forwardRef(function(t,a){var i;const{children:s,closeAfterTransition:r=!1,container:n,disableAutoFocus:o=!1,disableEnforceFocus:l=!1,disableEscapeKeyDown:u=!1,disablePortal:d=!1,disableRestoreFocus:f=!1,disableScrollLock:g=!1,hideBackdrop:c=!1,keepMounted:m=!1,onBackdropClick:p,open:q,slotProps:y={},slots:h={}}=t,_=U(t,pD),w=D({},t,{closeAfterTransition:r,disableAutoFocus:o,disableEnforceFocus:l,disableEscapeKeyDown:u,disablePortal:d,disableRestoreFocus:f,disableScrollLock:g,hideBackdrop:c,keepMounted:m}),{getRootProps:x,getBackdropProps:k,getTransitionProps:T,portalRef:S,isTopModal:A,exited:P,hasTransition:j}=dD(D({},w,{rootRef:a})),O=D({},w,{exited:P,hasTransition:j}),z=fD(O),R={};if(s.props.tabIndex===void 0&&(R.tabIndex="-1"),j){const{onEnter:I,onExited:L}=T();R.onEnter=I,R.onExited=L}const 
F=(i=h.root)!=null?i:"div",M=us({elementType:F,externalSlotProps:y.root,externalForwardedProps:_,getSlotProps:x,className:z.root,ownerState:O}),N=h.backdrop,C=us({elementType:N,externalSlotProps:y.backdrop,getSlotProps:I=>k(D({},I,{onClick:L=>{p&&p(L),I!=null&&I.onClick&&I.onClick(L)}})),className:z.backdrop,ownerState:O});return!m&&!q&&(!j||P)?null:v.jsx(tD,{ref:S,container:n,disablePortal:d,children:v.jsxs(F,D({},M,{children:[!c&&N?v.jsx(N,D({},C)):null,v.jsx(Zq,{disableEnforceFocus:l,disableAutoFocus:o,disableRestoreFocus:f,isEnabled:A,open:q,children:b.cloneElement(s,R)})]}))})}),hD=["onChange","maxRows","minRows","style","value"];function en(e){return parseInt(e,10)||0}const gD={shadow:{visibility:"hidden",position:"absolute",overflow:"hidden",height:0,top:0,left:0,transform:"translateZ(0)"}};function Wf(e){return e==null||Object.keys(e).length===0||e.outerHeightStyle===0&&!e.overflow}const yD=b.forwardRef(function(t,a){const{onChange:i,maxRows:s,minRows:r=1,style:n,value:o}=t,l=U(t,hD),{current:u}=b.useRef(o!=null),d=b.useRef(null),f=Xe(a,d),g=b.useRef(null),c=b.useRef(0),[m,p]=b.useState({outerHeightStyle:0}),q=b.useCallback(()=>{const x=d.current,T=ia(x).getComputedStyle(x);if(T.width==="0px")return{outerHeightStyle:0};const S=g.current;S.style.width=T.width,S.value=x.value||t.placeholder||"x",S.value.slice(-1)===` +`&&(S.value+=" ");const A=T.boxSizing,P=en(T.paddingBottom)+en(T.paddingTop),j=en(T.borderBottomWidth)+en(T.borderTopWidth),O=S.scrollHeight;S.value="x";const z=S.scrollHeight;let R=O;r&&(R=Math.max(Number(r)*z,R)),s&&(R=Math.min(Number(s)*z,R)),R=Math.max(R,z);const F=R+(A==="border-box"?P+j:0),M=Math.abs(R-O)<=1;return{outerHeightStyle:F,overflow:M}},[s,r,t.placeholder]),y=(x,k)=>{const{outerHeightStyle:T,overflow:S}=k;return c.current<20&&(T>0&&Math.abs((x.outerHeightStyle||0)-T)>1||x.overflow!==S)?(c.current+=1,{overflow:S,outerHeightStyle:T}):x},h=b.useCallback(()=>{const x=q();Wf(x)||p(k=>y(k,x))},[q]),_=()=>{const x=q();Wf(x)||vo.flushSync(()=>{p(k=>y(k,x))})};b.useEffect(()=>{const x=Ro(()=>{c.current=0,d.current&&_()});let k;const T=d.current,S=ia(T);return S.addEventListener("resize",x),typeof ResizeObserver<"u"&&(k=new ResizeObserver(x),k.observe(T)),()=>{x.clear(),S.removeEventListener("resize",x),k&&k.disconnect()}}),Va(()=>{h()}),b.useEffect(()=>{c.current=0},[o]);const w=x=>{c.current=0,u||h(),i&&i(x)};return v.jsxs(b.Fragment,{children:[v.jsx("textarea",D({value:o,onChange:w,ref:f,rows:r,style:D({height:m.outerHeightStyle,overflow:m.overflow?"hidden":void 0},n)},l)),v.jsx("textarea",{"aria-hidden":!0,className:t.className,readOnly:!0,ref:g,tabIndex:-1,style:D({},gD.shadow,n,{paddingTop:0,paddingBottom:0})})]})}),Rc=e=>e.scrollTop;function ds(e,t){var a,i;const{timeout:s,easing:r,style:n={}}=e;return{duration:(a=n.transitionDuration)!=null?a:typeof s=="number"?s:s[t.mode]||0,easing:(i=n.transitionTimingFunction)!=null?i:typeof r=="object"?r[t.mode]:r,delay:n.transitionDelay}}const _D=["addEndListener","appear","children","easing","in","onEnter","onEntered","onEntering","onExit","onExited","onExiting","style","timeout","TransitionComponent"];function id(e){return`scale(${e}, ${e**2})`}const vD={entering:{opacity:1,transform:id(1)},entered:{opacity:1,transform:"none"}},zl=typeof navigator<"u"&&/^((?!chrome|android).)*(safari|mobile)/i.test(navigator.userAgent)&&/(os 
|version\/)15(.|_)4/i.test(navigator.userAgent),by=b.forwardRef(function(t,a){const{addEndListener:i,appear:s=!0,children:r,easing:n,in:o,onEnter:l,onEntered:u,onEntering:d,onExit:f,onExited:g,onExiting:c,style:m,timeout:p="auto",TransitionComponent:q=Pc}=t,y=U(t,_D),h=b.useRef(),_=b.useRef(),w=Rr(),x=b.useRef(null),k=Xe(x,r.ref,a),T=F=>M=>{if(F){const N=x.current;M===void 0?F(N):F(N,M)}},S=T(d),A=T((F,M)=>{Rc(F);const{duration:N,delay:C,easing:I}=ds({style:m,timeout:p,easing:n},{mode:"enter"});let L;p==="auto"?(L=w.transitions.getAutoHeightDuration(F.clientHeight),_.current=L):L=N,F.style.transition=[w.transitions.create("opacity",{duration:L,delay:C}),w.transitions.create("transform",{duration:zl?L:L*.666,delay:C,easing:I})].join(","),l&&l(F,M)}),P=T(u),j=T(c),O=T(F=>{const{duration:M,delay:N,easing:C}=ds({style:m,timeout:p,easing:n},{mode:"exit"});let I;p==="auto"?(I=w.transitions.getAutoHeightDuration(F.clientHeight),_.current=I):I=M,F.style.transition=[w.transitions.create("opacity",{duration:I,delay:N}),w.transitions.create("transform",{duration:zl?I:I*.666,delay:zl?N:N||I*.333,easing:C})].join(","),F.style.opacity=0,F.style.transform=id(.75),f&&f(F)}),z=T(g),R=F=>{p==="auto"&&(h.current=setTimeout(F,_.current||0)),i&&i(x.current,F)};return b.useEffect(()=>()=>{clearTimeout(h.current)},[]),v.jsx(q,D({appear:s,in:o,nodeRef:x,onEnter:A,onEntered:P,onEntering:S,onExit:O,onExited:z,onExiting:j,addEndListener:R,timeout:p==="auto"?null:p},y,{children:(F,M)=>b.cloneElement(r,D({style:D({opacity:0,transform:id(.75),visibility:F==="exited"&&!o?"hidden":void 0},vD[F],m,r.props.style),ref:k},M))}))});by.muiSupportAuto=!0;const bD=by,wD=["addEndListener","appear","children","easing","in","onEnter","onEntered","onEntering","onExit","onExited","onExiting","style","timeout","TransitionComponent"],TD={entering:{opacity:1},entered:{opacity:1}},xD=b.forwardRef(function(t,a){const i=Rr(),s={enter:i.transitions.duration.enteringScreen,exit:i.transitions.duration.leavingScreen},{addEndListener:r,appear:n=!0,children:o,easing:l,in:u,onEnter:d,onEntered:f,onEntering:g,onExit:c,onExited:m,onExiting:p,style:q,timeout:y=s,TransitionComponent:h=Pc}=t,_=U(t,wD),w=b.useRef(null),x=Xe(w,o.ref,a),k=R=>F=>{if(R){const M=w.current;F===void 0?R(M):R(M,F)}},T=k(g),S=k((R,F)=>{Rc(R);const M=ds({style:q,timeout:y,easing:l},{mode:"enter"});R.style.webkitTransition=i.transitions.create("opacity",M),R.style.transition=i.transitions.create("opacity",M),d&&d(R,F)}),A=k(f),P=k(p),j=k(R=>{const F=ds({style:q,timeout:y,easing:l},{mode:"exit"});R.style.webkitTransition=i.transitions.create("opacity",F),R.style.transition=i.transitions.create("opacity",F),c&&c(R)}),O=k(m),z=R=>{r&&r(w.current,R)};return v.jsx(h,D({appear:n,in:u,nodeRef:w,onEnter:S,onEntered:A,onEntering:T,onExit:j,onExited:O,onExiting:P,addEndListener:z,timeout:y},_,{children:(R,F)=>b.cloneElement(o,D({style:D({opacity:0,visibility:R==="exited"&&!u?"hidden":void 0},TD[R],q,o.props.style),ref:x},F))}))}),qD=xD;function DD(e){return Pe("MuiBackdrop",e)}Te("MuiBackdrop",["root","invisible"]);const kD=["children","className","component","components","componentsProps","invisible","open","slotProps","slots","TransitionComponent","transitionDuration"],SD=e=>{const{classes:t,invisible:a}=e;return 
Fe({root:["root",a&&"invisible"]},DD,t)},CD=V("div",{name:"MuiBackdrop",slot:"Root",overridesResolver:(e,t)=>{const{ownerState:a}=e;return[t.root,a.invisible&&t.invisible]}})(({ownerState:e})=>D({position:"fixed",display:"flex",alignItems:"center",justifyContent:"center",right:0,bottom:0,top:0,left:0,backgroundColor:"rgba(0, 0, 0, 0.5)",WebkitTapHighlightColor:"transparent"},e.invisible&&{backgroundColor:"transparent"})),PD=b.forwardRef(function(t,a){var i,s,r;const n=Ne({props:t,name:"MuiBackdrop"}),{children:o,className:l,component:u="div",components:d={},componentsProps:f={},invisible:g=!1,open:c,slotProps:m={},slots:p={},TransitionComponent:q=qD,transitionDuration:y}=n,h=U(n,kD),_=D({},n,{component:u,invisible:g}),w=SD(_),x=(i=m.root)!=null?i:f.root;return v.jsx(q,D({in:c,timeout:y},h,{children:v.jsx(CD,D({"aria-hidden":!0},x,{as:(s=(r=p.root)!=null?r:d.Root)!=null?s:u,className:Q(w.root,l,x==null?void 0:x.className),ownerState:D({},_,x==null?void 0:x.ownerState),classes:w,ref:a,children:o}))}))}),AD=PD,ID=["BackdropComponent","BackdropProps","classes","className","closeAfterTransition","children","container","component","components","componentsProps","disableAutoFocus","disableEnforceFocus","disableEscapeKeyDown","disablePortal","disableRestoreFocus","disableScrollLock","hideBackdrop","keepMounted","onBackdropClick","onClose","open","slotProps","slots","theme"],RD=V("div",{name:"MuiModal",slot:"Root",overridesResolver:(e,t)=>{const{ownerState:a}=e;return[t.root,!a.open&&a.exited&&t.hidden]}})(({theme:e,ownerState:t})=>D({position:"fixed",zIndex:(e.vars||e).zIndex.modal,right:0,bottom:0,top:0,left:0},!t.open&&t.exited&&{visibility:"hidden"})),ED=V(AD,{name:"MuiModal",slot:"Backdrop",overridesResolver:(e,t)=>t.backdrop})({zIndex:-1}),jD=b.forwardRef(function(t,a){var i,s,r,n,o,l;const u=Ne({name:"MuiModal",props:t}),{BackdropComponent:d=ED,BackdropProps:f,classes:g,className:c,closeAfterTransition:m=!1,children:p,container:q,component:y,components:h={},componentsProps:_={},disableAutoFocus:w=!1,disableEnforceFocus:x=!1,disableEscapeKeyDown:k=!1,disablePortal:T=!1,disableRestoreFocus:S=!1,disableScrollLock:A=!1,hideBackdrop:P=!1,keepMounted:j=!1,onBackdropClick:O,onClose:z,open:R,slotProps:F,slots:M,theme:N}=u,C=U(u,ID),[I,L]=b.useState(!0),K={container:q,closeAfterTransition:m,disableAutoFocus:w,disableEnforceFocus:x,disableEscapeKeyDown:k,disablePortal:T,disableRestoreFocus:S,disableScrollLock:A,hideBackdrop:P,keepMounted:j,onBackdropClick:O,onClose:z,open:R},X=D({},u,K,{exited:I}),Ae=(i=(s=M==null?void 0:M.root)!=null?s:h.Root)!=null?i:RD,ue=(r=(n=M==null?void 0:M.backdrop)!=null?n:h.Backdrop)!=null?r:d,xe=(o=F==null?void 0:F.root)!=null?o:_.root,re=(l=F==null?void 0:F.backdrop)!=null?l:_.backdrop;return v.jsx(mD,D({slots:{root:Ae,backdrop:ue},slotProps:{root:()=>D({},ad(xe,X),!wr(Ae)&&{as:y,theme:N},{className:Q(c,xe==null?void 0:xe.className,g==null?void 0:g.root,!X.open&&X.exited&&(g==null?void 0:g.hidden))}),backdrop:()=>D({},f,ad(re,X),{className:Q(re==null?void 0:re.className,f==null?void 0:f.className,g==null?void 0:g.backdrop)})},onTransitionEnter:()=>L(!1),onTransitionExited:()=>L(!0),ref:a},C,K,{children:p}))}),wy=jD;function FD(e){return 
Pe("MuiPaper",e)}Te("MuiPaper",["root","rounded","outlined","elevation","elevation0","elevation1","elevation2","elevation3","elevation4","elevation5","elevation6","elevation7","elevation8","elevation9","elevation10","elevation11","elevation12","elevation13","elevation14","elevation15","elevation16","elevation17","elevation18","elevation19","elevation20","elevation21","elevation22","elevation23","elevation24"]);const ND=["className","component","elevation","square","variant"],LD=e=>{const{square:t,elevation:a,variant:i,classes:s}=e,r={root:["root",i,!t&&"rounded",i==="elevation"&&`elevation${a}`]};return Fe(r,FD,s)},MD=V("div",{name:"MuiPaper",slot:"Root",overridesResolver:(e,t)=>{const{ownerState:a}=e;return[t.root,t[a.variant],!a.square&&t.rounded,a.variant==="elevation"&&t[`elevation${a.elevation}`]]}})(({theme:e,ownerState:t})=>{var a;return D({backgroundColor:(e.vars||e).palette.background.paper,color:(e.vars||e).palette.text.primary,transition:e.transitions.create("box-shadow")},!t.square&&{borderRadius:e.shape.borderRadius},t.variant==="outlined"&&{border:`1px solid ${(e.vars||e).palette.divider}`},t.variant==="elevation"&&D({boxShadow:(e.vars||e).shadows[t.elevation]},!e.vars&&e.palette.mode==="dark"&&{backgroundImage:`linear-gradient(${pi("#fff",Sf(t.elevation))}, ${pi("#fff",Sf(t.elevation))})`},e.vars&&{backgroundImage:(a=e.vars.overlays)==null?void 0:a[t.elevation]}))}),OD=b.forwardRef(function(t,a){const i=Ne({props:t,name:"MuiPaper"}),{className:s,component:r="div",elevation:n=1,square:o=!1,variant:l="elevation"}=i,u=U(i,ND),d=D({},i,{component:r,elevation:n,square:o,variant:l}),f=LD(d);return v.jsx(MD,D({as:r,ownerState:d,className:Q(f.root,s),ref:a},u))}),Ty=OD;function BD(e){return Pe("MuiPopover",e)}Te("MuiPopover",["root","paper"]);const zD=["onEntering"],$D=["action","anchorEl","anchorOrigin","anchorPosition","anchorReference","children","className","container","elevation","marginThreshold","open","PaperProps","slots","slotProps","transformOrigin","TransitionComponent","transitionDuration","TransitionProps"],WD=["slotProps"];function Uf(e,t){let a=0;return typeof t=="number"?a=t:t==="center"?a=e.height/2:t==="bottom"&&(a=e.height),a}function Hf(e,t){let a=0;return typeof t=="number"?a=t:t==="center"?a=e.width/2:t==="right"&&(a=e.width),a}function Vf(e){return[e.horizontal,e.vertical].map(t=>typeof t=="number"?`${t}px`:t).join(" ")}function $l(e){return typeof e=="function"?e():e}const UD=e=>{const{classes:t}=e;return Fe({root:["root"],paper:["paper"]},BD,t)},HD=V(wy,{name:"MuiPopover",slot:"Root",overridesResolver:(e,t)=>t.root})({}),xy=V(Ty,{name:"MuiPopover",slot:"Paper",overridesResolver:(e,t)=>t.paper})({position:"absolute",overflowY:"auto",overflowX:"hidden",minWidth:16,minHeight:16,maxWidth:"calc(100% - 32px)",maxHeight:"calc(100% - 32px)",outline:0}),VD=b.forwardRef(function(t,a){var i,s,r;const n=Ne({props:t,name:"MuiPopover"}),{action:o,anchorEl:l,anchorOrigin:u={vertical:"top",horizontal:"left"},anchorPosition:d,anchorReference:f="anchorEl",children:g,className:c,container:m,elevation:p=8,marginThreshold:q=16,open:y,PaperProps:h={},slots:_,slotProps:w,transformOrigin:x={vertical:"top",horizontal:"left"},TransitionComponent:k=bD,transitionDuration:T="auto",TransitionProps:{onEntering:S}={}}=n,A=U(n.TransitionProps,zD),P=U(n,$D),j=(i=w==null?void 
0:w.paper)!=null?i:h,O=b.useRef(),z=Xe(O,j.ref),R=D({},n,{anchorOrigin:u,anchorReference:f,elevation:p,marginThreshold:q,externalPaperSlotProps:j,transformOrigin:x,TransitionComponent:k,transitionDuration:T,TransitionProps:A}),F=UD(R),M=b.useCallback(()=>{if(f==="anchorPosition")return d;const G=$l(l),qe=(G&&G.nodeType===1?G:ct(O.current).body).getBoundingClientRect();return{top:qe.top+Uf(qe,u.vertical),left:qe.left+Hf(qe,u.horizontal)}},[l,u.horizontal,u.vertical,d,f]),N=b.useCallback(G=>({vertical:Uf(G,x.vertical),horizontal:Hf(G,x.horizontal)}),[x.horizontal,x.vertical]),C=b.useCallback(G=>{const ge={width:G.offsetWidth,height:G.offsetHeight},qe=N(ge);if(f==="none")return{top:null,left:null,transformOrigin:Vf(qe)};const qt=M();let Dt=qt.top-qe.vertical,kt=qt.left-qe.horizontal;const Nt=Dt+ge.height,wa=kt+ge.width,Ue=ia($l(l)),Le=Ue.innerHeight-q,Lt=Ue.innerWidth-q;if(DtLe){const ye=Nt-Le;Dt-=ye,qe.vertical+=ye}if(ktLt){const ye=wa-Lt;kt-=ye,qe.horizontal+=ye}return{top:`${Math.round(Dt)}px`,left:`${Math.round(kt)}px`,transformOrigin:Vf(qe)}},[l,f,M,N,q]),[I,L]=b.useState(y),K=b.useCallback(()=>{const G=O.current;if(!G)return;const ge=C(G);ge.top!==null&&(G.style.top=ge.top),ge.left!==null&&(G.style.left=ge.left),G.style.transformOrigin=ge.transformOrigin,L(!0)},[C]),X=(G,ge)=>{S&&S(G,ge),K()},Ae=()=>{L(!1)};b.useEffect(()=>{y&&K()}),b.useImperativeHandle(o,()=>y?{updatePosition:()=>{K()}}:null,[y,K]),b.useEffect(()=>{if(!y)return;const G=Ro(()=>{K()}),ge=ia(l);return ge.addEventListener("resize",G),()=>{G.clear(),ge.removeEventListener("resize",G)}},[l,y,K]);let ue=T;T==="auto"&&!k.muiSupportAuto&&(ue=void 0);const xe=m||(l?ct($l(l)).body:void 0),re=(s=_==null?void 0:_.root)!=null?s:HD,rt=(r=_==null?void 0:_.paper)!=null?r:xy,Ya=us({elementType:rt,externalSlotProps:D({},j,{style:I?j.style:D({},j.style,{opacity:0})}),additionalProps:{elevation:p,ref:z},ownerState:R,className:Q(F.paper,j==null?void 0:j.className)}),Be=us({elementType:re,externalSlotProps:(w==null?void 0:w.root)||{},externalForwardedProps:P,additionalProps:{ref:a,slotProps:{backdrop:{invisible:!0}},container:xe,open:y},ownerState:R,className:Q(F.root,c)}),{slotProps:xt}=Be,ne=U(Be,WD);return v.jsx(re,D({},ne,!wr(re)&&{slotProps:xt},{children:v.jsx(k,D({appear:!0,in:y,onEntering:X,onExited:Ae,timeout:ue},A,{children:v.jsx(rt,D({},Ya,{children:g}))}))}))}),KD=VD;function GD(e){return Pe("MuiMenu",e)}Te("MuiMenu",["root","paper","list"]);const XD=["onEntering"],QD=["autoFocus","children","className","disableAutoFocusItem","MenuListProps","onClose","open","PaperProps","PopoverClasses","transitionDuration","TransitionProps","variant","slots","slotProps"],JD={vertical:"top",horizontal:"right"},YD={vertical:"top",horizontal:"left"},ZD=e=>{const{classes:t}=e;return Fe({root:["root"],paper:["paper"],list:["list"]},GD,t)},ek=V(KD,{shouldForwardProp:e=>sa(e)||e==="classes",name:"MuiMenu",slot:"Root",overridesResolver:(e,t)=>t.root})({}),tk=V(xy,{name:"MuiMenu",slot:"Paper",overridesResolver:(e,t)=>t.paper})({maxHeight:"calc(100% - 96px)",WebkitOverflowScrolling:"touch"}),ak=V(Vq,{name:"MuiMenu",slot:"List",overridesResolver:(e,t)=>t.list})({outline:0}),ik=b.forwardRef(function(t,a){var i,s;const 
r=Ne({props:t,name:"MuiMenu"}),{autoFocus:n=!0,children:o,className:l,disableAutoFocusItem:u=!1,MenuListProps:d={},onClose:f,open:g,PaperProps:c={},PopoverClasses:m,transitionDuration:p="auto",TransitionProps:{onEntering:q}={},variant:y="selectedMenu",slots:h={},slotProps:_={}}=r,w=U(r.TransitionProps,XD),x=U(r,QD),k=Rr(),T=k.direction==="rtl",S=D({},r,{autoFocus:n,disableAutoFocusItem:u,MenuListProps:d,onEntering:q,PaperProps:c,transitionDuration:p,TransitionProps:w,variant:y}),A=ZD(S),P=n&&!u&&g,j=b.useRef(null),O=(I,L)=>{j.current&&j.current.adjustStyleForScrollbar(I,k),q&&q(I,L)},z=I=>{I.key==="Tab"&&(I.preventDefault(),f&&f(I,"tabKeyDown"))};let R=-1;b.Children.map(o,(I,L)=>{b.isValidElement(I)&&(I.props.disabled||(y==="selectedMenu"&&I.props.selected||R===-1)&&(R=L))});const F=(i=h.paper)!=null?i:tk,M=(s=_.paper)!=null?s:c,N=us({elementType:h.root,externalSlotProps:_.root,ownerState:S,className:[A.root,l]}),C=us({elementType:F,externalSlotProps:M,ownerState:S,className:A.paper});return v.jsx(ek,D({onClose:f,anchorOrigin:{vertical:"bottom",horizontal:T?"right":"left"},transformOrigin:T?JD:YD,slots:{paper:F,root:h.root},slotProps:{root:N,paper:C},open:g,ref:a,transitionDuration:p,TransitionProps:D({onEntering:O},w),ownerState:S},x,{classes:m,children:v.jsx(ak,D({onKeyDown:z,actions:j,autoFocus:n&&(R===-1||u),autoFocusItem:P,variant:y},d,{className:Q(A.list,d.className),children:o}))}))}),sk=ik;function rk(e){return Pe("MuiNativeSelect",e)}const nk=Te("MuiNativeSelect",["root","select","multiple","filled","outlined","standard","disabled","icon","iconOpen","iconFilled","iconOutlined","iconStandard","nativeInput","error"]),Ec=nk,ok=["className","disabled","error","IconComponent","inputRef","variant"],lk=e=>{const{classes:t,variant:a,disabled:i,multiple:s,open:r,error:n}=e,o={select:["select",a,i&&"disabled",s&&"multiple",n&&"error"],icon:["icon",`icon${be(a)}`,r&&"iconOpen",i&&"disabled"]};return Fe(o,rk,t)},qy=({ownerState:e,theme:t})=>D({MozAppearance:"none",WebkitAppearance:"none",userSelect:"none",borderRadius:0,cursor:"pointer","&:focus":D({},t.vars?{backgroundColor:`rgba(${t.vars.palette.common.onBackgroundChannel} / 0.05)`}:{backgroundColor:t.palette.mode==="light"?"rgba(0, 0, 0, 0.05)":"rgba(255, 255, 255, 0.05)"},{borderRadius:0}),"&::-ms-expand":{display:"none"},[`&.${Ec.disabled}`]:{cursor:"default"},"&[multiple]":{height:"auto"},"&:not([multiple]) option, &:not([multiple]) optgroup":{backgroundColor:(t.vars||t).palette.background.paper},"&&&":{paddingRight:24,minWidth:16}},e.variant==="filled"&&{"&&&":{paddingRight:32}},e.variant==="outlined"&&{borderRadius:(t.vars||t).shape.borderRadius,"&:focus":{borderRadius:(t.vars||t).shape.borderRadius},"&&&":{paddingRight:32}}),uk=V("select",{name:"MuiNativeSelect",slot:"Select",shouldForwardProp:sa,overridesResolver:(e,t)=>{const{ownerState:a}=e;return[t.select,t[a.variant],a.error&&t.error,{[`&.${Ec.multiple}`]:t.multiple}]}})(qy),Dy=({ownerState:e,theme:t})=>D({position:"absolute",right:0,top:"calc(50% - 
.5em)",pointerEvents:"none",color:(t.vars||t).palette.action.active,[`&.${Ec.disabled}`]:{color:(t.vars||t).palette.action.disabled}},e.open&&{transform:"rotate(180deg)"},e.variant==="filled"&&{right:7},e.variant==="outlined"&&{right:7}),dk=V("svg",{name:"MuiNativeSelect",slot:"Icon",overridesResolver:(e,t)=>{const{ownerState:a}=e;return[t.icon,a.variant&&t[`icon${be(a.variant)}`],a.open&&t.iconOpen]}})(Dy),ck=b.forwardRef(function(t,a){const{className:i,disabled:s,error:r,IconComponent:n,inputRef:o,variant:l="standard"}=t,u=U(t,ok),d=D({},t,{disabled:s,variant:l,error:r}),f=lk(d);return v.jsxs(b.Fragment,{children:[v.jsx(uk,D({ownerState:d,className:Q(f.select,i),disabled:s,ref:o||a},u)),t.multiple?null:v.jsx(dk,{as:n,ownerState:d,className:f.icon})]})}),pk=ck;function fk(e){return Pe("MuiSelect",e)}const mk=Te("MuiSelect",["select","multiple","filled","outlined","standard","disabled","focused","icon","iconOpen","iconFilled","iconOutlined","iconStandard","nativeInput","error"]),Rs=mk;var Kf;const hk=["aria-describedby","aria-label","autoFocus","autoWidth","children","className","defaultOpen","defaultValue","disabled","displayEmpty","error","IconComponent","inputRef","labelId","MenuProps","multiple","name","onBlur","onChange","onClose","onFocus","onOpen","open","readOnly","renderValue","SelectDisplayProps","tabIndex","type","value","variant"],gk=V("div",{name:"MuiSelect",slot:"Select",overridesResolver:(e,t)=>{const{ownerState:a}=e;return[{[`&.${Rs.select}`]:t.select},{[`&.${Rs.select}`]:t[a.variant]},{[`&.${Rs.error}`]:t.error},{[`&.${Rs.multiple}`]:t.multiple}]}})(qy,{[`&.${Rs.select}`]:{height:"auto",minHeight:"1.4375em",textOverflow:"ellipsis",whiteSpace:"nowrap",overflow:"hidden"}}),yk=V("svg",{name:"MuiSelect",slot:"Icon",overridesResolver:(e,t)=>{const{ownerState:a}=e;return[t.icon,a.variant&&t[`icon${be(a.variant)}`],a.open&&t.iconOpen]}})(Dy),_k=V("input",{shouldForwardProp:e=>hx(e)&&e!=="classes",name:"MuiSelect",slot:"NativeInput",overridesResolver:(e,t)=>t.nativeInput})({bottom:0,left:0,position:"absolute",opacity:0,pointerEvents:"none",width:"100%",boxSizing:"border-box"});function Gf(e,t){return typeof t=="object"&&t!==null?e===t:String(e)===String(t)}function vk(e){return e==null||typeof e=="string"&&!e.trim()}const bk=e=>{const{classes:t,variant:a,disabled:i,multiple:s,open:r,error:n}=e,o={select:["select",a,i&&"disabled",s&&"multiple",n&&"error"],icon:["icon",`icon${be(a)}`,r&&"iconOpen",i&&"disabled"],nativeInput:["nativeInput"]};return Fe(o,fk,t)},wk=b.forwardRef(function(t,a){const{"aria-describedby":i,"aria-label":s,autoFocus:r,autoWidth:n,children:o,className:l,defaultOpen:u,defaultValue:d,disabled:f,displayEmpty:g,error:c=!1,IconComponent:m,inputRef:p,labelId:q,MenuProps:y={},multiple:h,name:_,onBlur:w,onChange:x,onClose:k,onFocus:T,onOpen:S,open:A,readOnly:P,renderValue:j,SelectDisplayProps:O={},tabIndex:z,value:R,variant:F="standard"}=t,M=U(t,hk),[N,C]=Ku({controlled:R,default:d,name:"Select"}),[I,L]=Ku({controlled:A,default:u,name:"Select"}),K=b.useRef(null),X=b.useRef(null),[Ae,ue]=b.useState(null),{current:xe}=b.useRef(A!=null),[re,rt]=b.useState(),Ya=Xe(a,p),Be=b.useCallback($=>{X.current=$,$&&ue($)},[]),xt=Ae==null?void 0:Ae.parentNode;b.useImperativeHandle(Ya,()=>({focus:()=>{X.current.focus()},node:K.current,value:N}),[N]),b.useEffect(()=>{u&&I&&Ae&&!xe&&(rt(n?null:xt.clientWidth),X.current.focus())},[Ae,n]),b.useEffect(()=>{r&&X.current.focus()},[r]),b.useEffect(()=>{if(!q)return;const $=ct(X.current).getElementById(q);if($){const 
de=()=>{getSelection().isCollapsed&&X.current.focus()};return $.addEventListener("click",de),()=>{$.removeEventListener("click",de)}}},[q]);const ne=($,de)=>{$?S&&S(de):k&&k(de),xe||(rt(n?null:xt.clientWidth),L($))},G=$=>{$.button===0&&($.preventDefault(),X.current.focus(),ne(!0,$))},ge=$=>{ne(!1,$)},qe=b.Children.toArray(o),qt=$=>{const de=qe.find(Ie=>Ie.props.value===$.target.value);de!==void 0&&(C(de.props.value),x&&x($,de))},Dt=$=>de=>{let Ie;if(de.currentTarget.hasAttribute("tabindex")){if(h){Ie=Array.isArray(N)?N.slice():[];const Za=N.indexOf($.props.value);Za===-1?Ie.push($.props.value):Ie.splice(Za,1)}else Ie=$.props.value;if($.props.onClick&&$.props.onClick(de),N!==Ie&&(C(Ie),x)){const Za=de.nativeEvent||de,zc=new Za.constructor(Za.type,Za);Object.defineProperty(zc,"target",{writable:!0,value:{value:Ie,name:_}}),x(zc,$)}h||ne(!1,de)}},kt=$=>{P||[" ","ArrowUp","ArrowDown","Enter"].indexOf($.key)!==-1&&($.preventDefault(),ne(!0,$))},Nt=Ae!==null&&I,wa=$=>{!Nt&&w&&(Object.defineProperty($,"target",{writable:!0,value:{value:N,name:_}}),w($))};delete M["aria-invalid"];let Ue,Le;const Lt=[];let ye=!1;(eo({value:N})||g)&&(j?Ue=j(N):ye=!0);const ra=qe.map($=>{if(!b.isValidElement($))return null;let de;if(h){if(!Array.isArray(N))throw new Error(Ha(2));de=N.some(Ie=>Gf(Ie,$.props.value)),de&&ye&&Lt.push($.props.children)}else de=Gf(N,$.props.value),de&&ye&&(Le=$.props.children);return b.cloneElement($,{"aria-selected":de?"true":"false",onClick:Dt($),onKeyUp:Ie=>{Ie.key===" "&&Ie.preventDefault(),$.props.onKeyUp&&$.props.onKeyUp(Ie)},role:"option",selected:de,value:void 0,"data-value":$.props.value})});ye&&(h?Lt.length===0?Ue=null:Ue=Lt.reduce(($,de,Ie)=>($.push(de),Ie{const{color:t,fontSize:a,classes:i}=e,s={root:["root",t!=="inherit"&&`color${be(t)}`,`fontSize${be(a)}`]};return Fe(s,xk,i)},kk=V("svg",{name:"MuiSvgIcon",slot:"Root",overridesResolver:(e,t)=>{const{ownerState:a}=e;return[t.root,a.color!=="inherit"&&t[`color${be(a.color)}`],t[`fontSize${be(a.fontSize)}`]]}})(({theme:e,ownerState:t})=>{var a,i,s,r,n,o,l,u,d,f,g,c,m;return{userSelect:"none",width:"1em",height:"1em",display:"inline-block",fill:t.hasSvgAsChild?void 0:"currentColor",flexShrink:0,transition:(a=e.transitions)==null||(i=a.create)==null?void 0:i.call(a,"fill",{duration:(s=e.transitions)==null||(s=s.duration)==null?void 0:s.shorter}),fontSize:{inherit:"inherit",small:((r=e.typography)==null||(n=r.pxToRem)==null?void 0:n.call(r,20))||"1.25rem",medium:((o=e.typography)==null||(l=o.pxToRem)==null?void 0:l.call(o,24))||"1.5rem",large:((u=e.typography)==null||(d=u.pxToRem)==null?void 0:d.call(u,35))||"2.1875rem"}[t.fontSize],color:(f=(g=(e.vars||e).palette)==null||(g=g[t.color])==null?void 0:g.main)!=null?f:{action:(c=(e.vars||e).palette)==null||(c=c.action)==null?void 0:c.active,disabled:(m=(e.vars||e).palette)==null||(m=m.action)==null?void 0:m.disabled,inherit:void 0}[t.color]}}),ky=b.forwardRef(function(t,a){const i=Ne({props:t,name:"MuiSvgIcon"}),{children:s,className:r,color:n="inherit",component:o="svg",fontSize:l="medium",htmlColor:u,inheritViewBox:d=!1,titleAccess:f,viewBox:g="0 0 24 24"}=i,c=U(i,qk),m=b.isValidElement(s)&&s.type==="svg",p=D({},i,{color:n,component:o,fontSize:l,instanceFontSize:t.fontSize,inheritViewBox:d,viewBox:g,hasSvgAsChild:m}),q={};d||(q.viewBox=g);const y=Dk(p);return v.jsxs(kk,D({as:o,className:Q(y.root,r),focusable:"false",color:u,"aria-hidden":f?void 0:!0,role:f?"img":void 
0,ref:a},q,c,m&&s.props,{ownerState:p,children:[m?s.props.children:s,f?v.jsx("title",{children:f}):null]}))});ky.muiName="SvgIcon";const Xf=ky;function Sy(e,t){function a(i,s){return v.jsx(Xf,D({"data-testid":`${t}Icon`,ref:s},i,{children:e}))}return a.muiName=Xf.muiName,b.memo(b.forwardRef(a))}const Sk=Sy(v.jsx("path",{d:"M7 10l5 5 5-5z"}),"ArrowDropDown");function Ck(e){return v.jsx(dT,D({},e,{defaultTheme:il,themeId:Ir}))}function Pk(e){return Pe("MuiInputBase",e)}const Ak=Te("MuiInputBase",["root","formControl","focused","disabled","adornedStart","adornedEnd","error","sizeSmall","multiline","colorSecondary","fullWidth","hiddenLabel","readOnly","input","inputSizeSmall","inputMultiline","inputTypeSearch","inputAdornedStart","inputAdornedEnd","inputHiddenLabel"]),cs=Ak,Ik=["aria-describedby","autoComplete","autoFocus","className","color","components","componentsProps","defaultValue","disabled","disableInjectingGlobalStyles","endAdornment","error","fullWidth","id","inputComponent","inputProps","inputRef","margin","maxRows","minRows","multiline","name","onBlur","onChange","onClick","onFocus","onKeyDown","onKeyUp","placeholder","readOnly","renderSuffix","rows","size","slotProps","slots","startAdornment","type","value"],rl=(e,t)=>{const{ownerState:a}=e;return[t.root,a.formControl&&t.formControl,a.startAdornment&&t.adornedStart,a.endAdornment&&t.adornedEnd,a.error&&t.error,a.size==="small"&&t.sizeSmall,a.multiline&&t.multiline,a.color&&t[`color${be(a.color)}`],a.fullWidth&&t.fullWidth,a.hiddenLabel&&t.hiddenLabel]},nl=(e,t)=>{const{ownerState:a}=e;return[t.input,a.size==="small"&&t.inputSizeSmall,a.multiline&&t.inputMultiline,a.type==="search"&&t.inputTypeSearch,a.startAdornment&&t.inputAdornedStart,a.endAdornment&&t.inputAdornedEnd,a.hiddenLabel&&t.inputHiddenLabel]},Rk=e=>{const{classes:t,color:a,disabled:i,error:s,endAdornment:r,focused:n,formControl:o,fullWidth:l,hiddenLabel:u,multiline:d,readOnly:f,size:g,startAdornment:c,type:m}=e,p={root:["root",`color${be(a)}`,i&&"disabled",s&&"error",l&&"fullWidth",n&&"focused",o&&"formControl",g==="small"&&"sizeSmall",d&&"multiline",c&&"adornedStart",r&&"adornedEnd",u&&"hiddenLabel",f&&"readOnly"],input:["input",i&&"disabled",m==="search"&&"inputTypeSearch",d&&"inputMultiline",g==="small"&&"inputSizeSmall",u&&"inputHiddenLabel",c&&"inputAdornedStart",r&&"inputAdornedEnd",f&&"readOnly"]};return Fe(p,Pk,t)},ol=V("div",{name:"MuiInputBase",slot:"Root",overridesResolver:rl})(({theme:e,ownerState:t})=>D({},e.typography.body1,{color:(e.vars||e).palette.text.primary,lineHeight:"1.4375em",boxSizing:"border-box",position:"relative",cursor:"text",display:"inline-flex",alignItems:"center",[`&.${cs.disabled}`]:{color:(e.vars||e).palette.text.disabled,cursor:"default"}},t.multiline&&D({padding:"4px 0 5px"},t.size==="small"&&{paddingTop:1}),t.fullWidth&&{width:"100%"})),ll=V("input",{name:"MuiInputBase",slot:"Input",overridesResolver:nl})(({theme:e,ownerState:t})=>{const a=e.palette.mode==="light",i=D({color:"currentColor"},e.vars?{opacity:e.vars.opacity.inputPlaceholder}:{opacity:a?.42:.5},{transition:e.transitions.create("opacity",{duration:e.transitions.duration.shorter})}),s={opacity:"0 !important"},r=e.vars?{opacity:e.vars.opacity.inputPlaceholder}:{opacity:a?.42:.5};return D({font:"inherit",letterSpacing:"inherit",color:"currentColor",padding:"4px 0 
5px",border:0,boxSizing:"content-box",background:"none",height:"1.4375em",margin:0,WebkitTapHighlightColor:"transparent",display:"block",minWidth:0,width:"100%",animationName:"mui-auto-fill-cancel",animationDuration:"10ms","&::-webkit-input-placeholder":i,"&::-moz-placeholder":i,"&:-ms-input-placeholder":i,"&::-ms-input-placeholder":i,"&:focus":{outline:0},"&:invalid":{boxShadow:"none"},"&::-webkit-search-decoration":{WebkitAppearance:"none"},[`label[data-shrink=false] + .${cs.formControl} &`]:{"&::-webkit-input-placeholder":s,"&::-moz-placeholder":s,"&:-ms-input-placeholder":s,"&::-ms-input-placeholder":s,"&:focus::-webkit-input-placeholder":r,"&:focus::-moz-placeholder":r,"&:focus:-ms-input-placeholder":r,"&:focus::-ms-input-placeholder":r},[`&.${cs.disabled}`]:{opacity:1,WebkitTextFillColor:(e.vars||e).palette.text.disabled},"&:-webkit-autofill":{animationDuration:"5000s",animationName:"mui-auto-fill"}},t.size==="small"&&{paddingTop:1},t.multiline&&{height:"auto",resize:"none",padding:0,paddingTop:0},t.type==="search"&&{MozAppearance:"textfield"})}),Ek=v.jsx(Ck,{styles:{"@keyframes mui-auto-fill":{from:{display:"block"}},"@keyframes mui-auto-fill-cancel":{from:{display:"block"}}}}),jk=b.forwardRef(function(t,a){var i;const s=Ne({props:t,name:"MuiInputBase"}),{"aria-describedby":r,autoComplete:n,autoFocus:o,className:l,components:u={},componentsProps:d={},defaultValue:f,disabled:g,disableInjectingGlobalStyles:c,endAdornment:m,fullWidth:p=!1,id:q,inputComponent:y="input",inputProps:h={},inputRef:_,maxRows:w,minRows:x,multiline:k=!1,name:T,onBlur:S,onChange:A,onClick:P,onFocus:j,onKeyDown:O,onKeyUp:z,placeholder:R,readOnly:F,renderSuffix:M,rows:N,slotProps:C={},slots:I={},startAdornment:L,type:K="text",value:X}=s,Ae=U(s,Ik),ue=h.value!=null?h.value:X,{current:xe}=b.useRef(ue!=null),re=b.useRef(),rt=b.useCallback(oe=>{},[]),Ya=Xe(re,_,h.ref,rt),[Be,xt]=b.useState(!1),ne=jr(),G=Er({props:s,muiFormControl:ne,states:["color","disabled","error","hiddenLabel","size","required","filled"]});G.focused=ne?ne.focused:Be,b.useEffect(()=>{!ne&&g&&Be&&(xt(!1),S&&S())},[ne,g,Be,S]);const ge=ne&&ne.onFilled,qe=ne&&ne.onEmpty,qt=b.useCallback(oe=>{eo(oe)?ge&&ge():qe&&qe()},[ge,qe]);Va(()=>{xe&&qt({value:ue})},[ue,qt,xe]);const Dt=oe=>{if(G.disabled){oe.stopPropagation();return}j&&j(oe),h.onFocus&&h.onFocus(oe),ne&&ne.onFocus?ne.onFocus(oe):xt(!0)},kt=oe=>{S&&S(oe),h.onBlur&&h.onBlur(oe),ne&&ne.onBlur?ne.onBlur(oe):xt(!1)},Nt=(oe,...W)=>{if(!xe){const $=oe.target||re.current;if($==null)throw new Error(Ha(1));qt({value:$.value})}h.onChange&&h.onChange(oe,...W),A&&A(oe,...W)};b.useEffect(()=>{qt(re.current)},[]);const wa=oe=>{re.current&&oe.currentTarget===oe.target&&re.current.focus(),P&&!G.disabled&&P(oe)};let Ue=y,Le=h;k&&Ue==="input"&&(N?Le=D({type:void 0,minRows:N,maxRows:N},Le):Le=D({type:void 0,maxRows:w,minRows:x},Le),Ue=yD);const Lt=oe=>{qt(oe.animationName==="mui-auto-fill-cancel"?re.current:{value:"x"})};b.useEffect(()=>{ne&&ne.setAdornedStart(!!L)},[ne,L]);const ye=D({},s,{color:G.color||"primary",disabled:G.disabled,endAdornment:m,error:G.error,focused:G.focused,formControl:ne,fullWidth:p,hiddenLabel:G.hiddenLabel,multiline:k,size:G.size,startAdornment:L,type:K}),ra=Rk(ye),Mt=I.root||u.Root||ol,na=C.root||d.root||{},Ta=I.input||u.Input||ll;return 
Le=D({},Le,(i=C.input)!=null?i:d.input),v.jsxs(b.Fragment,{children:[!c&&Ek,v.jsxs(Mt,D({},na,!wr(Mt)&&{ownerState:D({},ye,na.ownerState)},{ref:a,onClick:wa},Ae,{className:Q(ra.root,na.className,l,F&&"MuiInputBase-readOnly"),children:[L,v.jsx(Cc.Provider,{value:null,children:v.jsx(Ta,D({ownerState:ye,"aria-invalid":G.error,"aria-describedby":r,autoComplete:n,autoFocus:o,defaultValue:f,disabled:G.disabled,id:q,onAnimationStart:Lt,name:T,placeholder:R,readOnly:F,required:G.required,rows:N,value:ue,onKeyDown:O,onKeyUp:z,type:K},Le,!wr(Ta)&&{as:Ue,ownerState:D({},ye,Le.ownerState)},{ref:Ya,className:Q(ra.input,Le.className,F&&"MuiInputBase-readOnly"),onBlur:kt,onChange:Nt,onFocus:Dt}))}),m,M?M(D({},G,{startAdornment:L})):null]}))]})}),jc=jk;function Fk(e){return Pe("MuiInput",e)}const Nk=D({},cs,Te("MuiInput",["root","underline","input"])),Es=Nk,Lk=["disableUnderline","components","componentsProps","fullWidth","inputComponent","multiline","slotProps","slots","type"],Mk=e=>{const{classes:t,disableUnderline:a}=e,s=Fe({root:["root",!a&&"underline"],input:["input"]},Fk,t);return D({},t,s)},Ok=V(ol,{shouldForwardProp:e=>sa(e)||e==="classes",name:"MuiInput",slot:"Root",overridesResolver:(e,t)=>{const{ownerState:a}=e;return[...rl(e,t),!a.disableUnderline&&t.underline]}})(({theme:e,ownerState:t})=>{let i=e.palette.mode==="light"?"rgba(0, 0, 0, 0.42)":"rgba(255, 255, 255, 0.7)";return e.vars&&(i=`rgba(${e.vars.palette.common.onBackgroundChannel} / ${e.vars.opacity.inputUnderline})`),D({position:"relative"},t.formControl&&{"label + &":{marginTop:16}},!t.disableUnderline&&{"&:after":{borderBottom:`2px solid ${(e.vars||e).palette[t.color].main}`,left:0,bottom:0,content:'""',position:"absolute",right:0,transform:"scaleX(0)",transition:e.transitions.create("transform",{duration:e.transitions.duration.shorter,easing:e.transitions.easing.easeOut}),pointerEvents:"none"},[`&.${Es.focused}:after`]:{transform:"scaleX(1) translateX(0)"},[`&.${Es.error}`]:{"&:before, &:after":{borderBottomColor:(e.vars||e).palette.error.main}},"&:before":{borderBottom:`1px solid ${i}`,left:0,bottom:0,content:'"\\00a0"',position:"absolute",right:0,transition:e.transitions.create("border-bottom-color",{duration:e.transitions.duration.shorter}),pointerEvents:"none"},[`&:hover:not(.${Es.disabled}, .${Es.error}):before`]:{borderBottom:`2px solid ${(e.vars||e).palette.text.primary}`,"@media (hover: none)":{borderBottom:`1px solid ${i}`}},[`&.${Es.disabled}:before`]:{borderBottomStyle:"dotted"}})}),Bk=V(ll,{name:"MuiInput",slot:"Input",overridesResolver:nl})({}),Cy=b.forwardRef(function(t,a){var i,s,r,n;const o=Ne({props:t,name:"MuiInput"}),{disableUnderline:l,components:u={},componentsProps:d,fullWidth:f=!1,inputComponent:g="input",multiline:c=!1,slotProps:m,slots:p={},type:q="text"}=o,y=U(o,Lk),h=Mk(o),w={root:{ownerState:{disableUnderline:l}}},x=m??d?Rt(m??d,w):w,k=(i=(s=p.root)!=null?s:u.Root)!=null?i:Ok,T=(r=(n=p.input)!=null?n:u.Input)!=null?r:Bk;return v.jsx(jc,D({slots:{root:k,input:T},slotProps:x,fullWidth:f,inputComponent:g,multiline:c,ref:a,type:q},y,{classes:h}))});Cy.muiName="Input";const zk=Cy;function $k(e){return Pe("MuiFilledInput",e)}const Wk=D({},cs,Te("MuiFilledInput",["root","underline","input"])),ei=Wk,Uk=["disableUnderline","components","componentsProps","fullWidth","hiddenLabel","inputComponent","multiline","slotProps","slots","type"],Hk=e=>{const{classes:t,disableUnderline:a}=e,s=Fe({root:["root",!a&&"underline"],input:["input"]},$k,t);return 
D({},t,s)},Vk=V(ol,{shouldForwardProp:e=>sa(e)||e==="classes",name:"MuiFilledInput",slot:"Root",overridesResolver:(e,t)=>{const{ownerState:a}=e;return[...rl(e,t),!a.disableUnderline&&t.underline]}})(({theme:e,ownerState:t})=>{var a;const i=e.palette.mode==="light",s=i?"rgba(0, 0, 0, 0.42)":"rgba(255, 255, 255, 0.7)",r=i?"rgba(0, 0, 0, 0.06)":"rgba(255, 255, 255, 0.09)",n=i?"rgba(0, 0, 0, 0.09)":"rgba(255, 255, 255, 0.13)",o=i?"rgba(0, 0, 0, 0.12)":"rgba(255, 255, 255, 0.12)";return D({position:"relative",backgroundColor:e.vars?e.vars.palette.FilledInput.bg:r,borderTopLeftRadius:(e.vars||e).shape.borderRadius,borderTopRightRadius:(e.vars||e).shape.borderRadius,transition:e.transitions.create("background-color",{duration:e.transitions.duration.shorter,easing:e.transitions.easing.easeOut}),"&:hover":{backgroundColor:e.vars?e.vars.palette.FilledInput.hoverBg:n,"@media (hover: none)":{backgroundColor:e.vars?e.vars.palette.FilledInput.bg:r}},[`&.${ei.focused}`]:{backgroundColor:e.vars?e.vars.palette.FilledInput.bg:r},[`&.${ei.disabled}`]:{backgroundColor:e.vars?e.vars.palette.FilledInput.disabledBg:o}},!t.disableUnderline&&{"&:after":{borderBottom:`2px solid ${(a=(e.vars||e).palette[t.color||"primary"])==null?void 0:a.main}`,left:0,bottom:0,content:'""',position:"absolute",right:0,transform:"scaleX(0)",transition:e.transitions.create("transform",{duration:e.transitions.duration.shorter,easing:e.transitions.easing.easeOut}),pointerEvents:"none"},[`&.${ei.focused}:after`]:{transform:"scaleX(1) translateX(0)"},[`&.${ei.error}`]:{"&:before, &:after":{borderBottomColor:(e.vars||e).palette.error.main}},"&:before":{borderBottom:`1px solid ${e.vars?`rgba(${e.vars.palette.common.onBackgroundChannel} / ${e.vars.opacity.inputUnderline})`:s}`,left:0,bottom:0,content:'"\\00a0"',position:"absolute",right:0,transition:e.transitions.create("border-bottom-color",{duration:e.transitions.duration.shorter}),pointerEvents:"none"},[`&:hover:not(.${ei.disabled}, .${ei.error}):before`]:{borderBottom:`1px solid ${(e.vars||e).palette.text.primary}`},[`&.${ei.disabled}:before`]:{borderBottomStyle:"dotted"}},t.startAdornment&&{paddingLeft:12},t.endAdornment&&{paddingRight:12},t.multiline&&D({padding:"25px 12px 8px"},t.size==="small"&&{paddingTop:21,paddingBottom:4},t.hiddenLabel&&{paddingTop:16,paddingBottom:17}))}),Kk=V(ll,{name:"MuiFilledInput",slot:"Input",overridesResolver:nl})(({theme:e,ownerState:t})=>D({paddingTop:25,paddingRight:12,paddingBottom:8,paddingLeft:12},!e.vars&&{"&:-webkit-autofill":{WebkitBoxShadow:e.palette.mode==="light"?null:"0 0 0 100px #266798 inset",WebkitTextFillColor:e.palette.mode==="light"?null:"#fff",caretColor:e.palette.mode==="light"?null:"#fff",borderTopLeftRadius:"inherit",borderTopRightRadius:"inherit"}},e.vars&&{"&:-webkit-autofill":{borderTopLeftRadius:"inherit",borderTopRightRadius:"inherit"},[e.getColorSchemeSelector("dark")]:{"&:-webkit-autofill":{WebkitBoxShadow:"0 0 0 100px #266798 inset",WebkitTextFillColor:"#fff",caretColor:"#fff"}}},t.size==="small"&&{paddingTop:21,paddingBottom:4},t.hiddenLabel&&{paddingTop:16,paddingBottom:17},t.multiline&&{paddingTop:0,paddingBottom:0,paddingLeft:0,paddingRight:0},t.startAdornment&&{paddingLeft:0},t.endAdornment&&{paddingRight:0},t.hiddenLabel&&t.size==="small"&&{paddingTop:8,paddingBottom:9})),Py=b.forwardRef(function(t,a){var i,s,r,n;const 
o=Ne({props:t,name:"MuiFilledInput"}),{components:l={},componentsProps:u,fullWidth:d=!1,inputComponent:f="input",multiline:g=!1,slotProps:c,slots:m={},type:p="text"}=o,q=U(o,Uk),y=D({},o,{fullWidth:d,inputComponent:f,multiline:g,type:p}),h=Hk(o),_={root:{ownerState:y},input:{ownerState:y}},w=c??u?Rt(c??u,_):_,x=(i=(s=m.root)!=null?s:l.Root)!=null?i:Vk,k=(r=(n=m.input)!=null?n:l.Input)!=null?r:Kk;return v.jsx(jc,D({slots:{root:x,input:k},componentsProps:w,fullWidth:d,inputComponent:f,multiline:g,ref:a,type:p},q,{classes:h}))});Py.muiName="Input";const Gk=Py;var Qf;const Xk=["children","classes","className","label","notched"],Qk=V("fieldset")({textAlign:"left",position:"absolute",bottom:0,right:0,top:-5,left:0,margin:0,padding:"0 8px",pointerEvents:"none",borderRadius:"inherit",borderStyle:"solid",borderWidth:1,overflow:"hidden",minWidth:"0%"}),Jk=V("legend")(({ownerState:e,theme:t})=>D({float:"unset",width:"auto",overflow:"hidden"},!e.withLabel&&{padding:0,lineHeight:"11px",transition:t.transitions.create("width",{duration:150,easing:t.transitions.easing.easeOut})},e.withLabel&&D({display:"block",padding:0,height:11,fontSize:"0.75em",visibility:"hidden",maxWidth:.01,transition:t.transitions.create("max-width",{duration:50,easing:t.transitions.easing.easeOut}),whiteSpace:"nowrap","& > span":{paddingLeft:5,paddingRight:5,display:"inline-block",opacity:0,visibility:"visible"}},e.notched&&{maxWidth:"100%",transition:t.transitions.create("max-width",{duration:100,easing:t.transitions.easing.easeOut,delay:50})})));function Yk(e){const{className:t,label:a,notched:i}=e,s=U(e,Xk),r=a!=null&&a!=="",n=D({},e,{notched:i,withLabel:r});return v.jsx(Qk,D({"aria-hidden":!0,className:t,ownerState:n},s,{children:v.jsx(Jk,{ownerState:n,children:r?v.jsx("span",{children:a}):Qf||(Qf=v.jsx("span",{className:"notranslate",children:"​"}))})}))}function Zk(e){return Pe("MuiOutlinedInput",e)}const eS=D({},cs,Te("MuiOutlinedInput",["root","notchedOutline","input"])),qa=eS,tS=["components","fullWidth","inputComponent","label","multiline","notched","slots","type"],aS=e=>{const{classes:t}=e,i=Fe({root:["root"],notchedOutline:["notchedOutline"],input:["input"]},Zk,t);return D({},t,i)},iS=V(ol,{shouldForwardProp:e=>sa(e)||e==="classes",name:"MuiOutlinedInput",slot:"Root",overridesResolver:rl})(({theme:e,ownerState:t})=>{const a=e.palette.mode==="light"?"rgba(0, 0, 0, 0.23)":"rgba(255, 255, 255, 0.23)";return D({position:"relative",borderRadius:(e.vars||e).shape.borderRadius,[`&:hover .${qa.notchedOutline}`]:{borderColor:(e.vars||e).palette.text.primary},"@media (hover: none)":{[`&:hover .${qa.notchedOutline}`]:{borderColor:e.vars?`rgba(${e.vars.palette.common.onBackgroundChannel} / 0.23)`:a}},[`&.${qa.focused} .${qa.notchedOutline}`]:{borderColor:(e.vars||e).palette[t.color].main,borderWidth:2},[`&.${qa.error} .${qa.notchedOutline}`]:{borderColor:(e.vars||e).palette.error.main},[`&.${qa.disabled} .${qa.notchedOutline}`]:{borderColor:(e.vars||e).palette.action.disabled}},t.startAdornment&&{paddingLeft:14},t.endAdornment&&{paddingRight:14},t.multiline&&D({padding:"16.5px 14px"},t.size==="small"&&{padding:"8.5px 14px"}))}),sS=V(Yk,{name:"MuiOutlinedInput",slot:"NotchedOutline",overridesResolver:(e,t)=>t.notchedOutline})(({theme:e})=>{const t=e.palette.mode==="light"?"rgba(0, 0, 0, 0.23)":"rgba(255, 255, 255, 0.23)";return{borderColor:e.vars?`rgba(${e.vars.palette.common.onBackgroundChannel} / 0.23)`:t}}),rS=V(ll,{name:"MuiOutlinedInput",slot:"Input",overridesResolver:nl})(({theme:e,ownerState:t})=>D({padding:"16.5px 
14px"},!e.vars&&{"&:-webkit-autofill":{WebkitBoxShadow:e.palette.mode==="light"?null:"0 0 0 100px #266798 inset",WebkitTextFillColor:e.palette.mode==="light"?null:"#fff",caretColor:e.palette.mode==="light"?null:"#fff",borderRadius:"inherit"}},e.vars&&{"&:-webkit-autofill":{borderRadius:"inherit"},[e.getColorSchemeSelector("dark")]:{"&:-webkit-autofill":{WebkitBoxShadow:"0 0 0 100px #266798 inset",WebkitTextFillColor:"#fff",caretColor:"#fff"}}},t.size==="small"&&{padding:"8.5px 14px"},t.multiline&&{padding:0},t.startAdornment&&{paddingLeft:0},t.endAdornment&&{paddingRight:0})),Ay=b.forwardRef(function(t,a){var i,s,r,n,o;const l=Ne({props:t,name:"MuiOutlinedInput"}),{components:u={},fullWidth:d=!1,inputComponent:f="input",label:g,multiline:c=!1,notched:m,slots:p={},type:q="text"}=l,y=U(l,tS),h=aS(l),_=jr(),w=Er({props:l,muiFormControl:_,states:["color","disabled","error","focused","hiddenLabel","size","required"]}),x=D({},l,{color:w.color||"primary",disabled:w.disabled,error:w.error,focused:w.focused,formControl:_,fullWidth:d,hiddenLabel:w.hiddenLabel,multiline:c,size:w.size,type:q}),k=(i=(s=p.root)!=null?s:u.Root)!=null?i:iS,T=(r=(n=p.input)!=null?n:u.Input)!=null?r:rS;return v.jsx(jc,D({slots:{root:k,input:T},renderSuffix:S=>v.jsx(sS,{ownerState:x,className:h.notchedOutline,label:g!=null&&g!==""&&w.required?o||(o=v.jsxs(b.Fragment,{children:[g," ","*"]})):g,notched:typeof m<"u"?m:!!(S.startAdornment||S.filled||S.focused)}),fullWidth:d,inputComponent:f,multiline:c,ref:a,type:q},y,{classes:D({},h,{notchedOutline:null})}))});Ay.muiName="Input";const nS=Ay,oS=["autoWidth","children","classes","className","defaultOpen","displayEmpty","IconComponent","id","input","inputProps","label","labelId","MenuProps","multiple","native","onClose","onOpen","open","renderValue","SelectDisplayProps","variant"],lS=e=>{const{classes:t}=e;return t},Fc={name:"MuiSelect",overridesResolver:(e,t)=>t.root,shouldForwardProp:e=>sa(e)&&e!=="variant",slot:"Root"},uS=V(zk,Fc)(""),dS=V(nS,Fc)(""),cS=V(Gk,Fc)(""),Iy=b.forwardRef(function(t,a){const i=Ne({name:"MuiSelect",props:t}),{autoWidth:s=!1,children:r,classes:n={},className:o,defaultOpen:l=!1,displayEmpty:u=!1,IconComponent:d=Sk,id:f,input:g,inputProps:c,label:m,labelId:p,MenuProps:q,multiple:y=!1,native:h=!1,onClose:_,onOpen:w,open:x,renderValue:k,SelectDisplayProps:T,variant:S="outlined"}=i,A=U(i,oS),P=h?pk:Tk,j=jr(),O=Er({props:i,muiFormControl:j,states:["variant","error"]}),z=O.variant||S,R=D({},i,{variant:z,classes:n}),F=lS(R),M=g||{standard:v.jsx(uS,{ownerState:R}),outlined:v.jsx(dS,{label:m,ownerState:R}),filled:v.jsx(cS,{ownerState:R})}[z],N=Xe(a,M.ref);return v.jsx(b.Fragment,{children:b.cloneElement(M,D({inputComponent:P,inputProps:D({children:r,error:O.error,IconComponent:d,variant:z,type:void 0,multiple:y},h?{id:f}:{autoWidth:s,defaultOpen:l,displayEmpty:u,labelId:p,MenuProps:q,onClose:_,onOpen:w,open:x,renderValue:k,SelectDisplayProps:D({id:f},T)},c,{classes:c?Rt(F,c.classes):F},g?g.props.inputProps:{})},y&&h&&z==="outlined"?{notched:!0}:{},{ref:N,className:Q(M.props.className,o)},!g&&{variant:z},A))})});Iy.muiName="Select";const pS=Iy;var Nc={},Ry={exports:{}};(function(e){function t(a){return a&&a.__esModule?a:{default:a}}e.exports=t,e.exports.__esModule=!0,e.exports.default=e.exports})(Ry);var Ey=Ry.exports,Wl={};const 
fS={configure:e=>{pc.configure(e)}},mS=Object.freeze(Object.defineProperty({__proto__:null,capitalize:be,createChainedFunction:Vu,createSvgIcon:Sy,debounce:Ro,deprecatedPropType:Xw,isMuiElement:gn,ownerDocument:ct,ownerWindow:ia,requirePropFactory:Qw,setRef:Qn,unstable_ClassNameGenerator:fS,unstable_useEnhancedEffect:Va,unstable_useId:Zw,unsupportedProp:e0,useControlled:Ku,useEventCallback:ci,useForkRef:Xe,useIsFocusVisible:$g},Symbol.toStringTag,{value:"Module"})),hS=g_(mS);var Jf;function jy(){return Jf||(Jf=1,function(e){"use client";Object.defineProperty(e,"__esModule",{value:!0}),Object.defineProperty(e,"default",{enumerable:!0,get:function(){return t.createSvgIcon}});var t=hS}(Wl)),Wl}var gS=Ey;Object.defineProperty(Nc,"__esModule",{value:!0});var Fy=Nc.default=void 0,yS=gS(jy()),_S=v,vS=(0,yS.default)((0,_S.jsx)("path",{d:"M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm-2 15-5-5 1.41-1.41L10 14.17l7.59-7.59L19 8l-9 9z"}),"CheckCircle");Fy=Nc.default=vS;var Lc={},bS=Ey;Object.defineProperty(Lc,"__esModule",{value:!0});var Ny=Lc.default=void 0,wS=bS(jy()),TS=v,xS=(0,wS.default)((0,TS.jsx)("path",{d:"M15.5 14h-.79l-.28-.27C15.41 12.59 16 11.11 16 9.5 16 5.91 13.09 3 9.5 3S3 5.91 3 9.5 5.91 16 9.5 16c1.61 0 3.09-.59 4.23-1.57l.27.28v.79l5 4.99L20.49 19l-4.99-5zm-6 0C7.01 14 5 11.99 5 9.5S7.01 5 9.5 5 14 7.01 14 9.5 11.99 14 9.5 14z"}),"Search");Ny=Lc.default=xS;function _a(e){return Array.isArray?Array.isArray(e):Oy(e)==="[object Array]"}const qS=1/0;function DS(e){if(typeof e=="string")return e;let t=e+"";return t=="0"&&1/e==-qS?"-0":t}function kS(e){return e==null?"":DS(e)}function Zt(e){return typeof e=="string"}function Ly(e){return typeof e=="number"}function SS(e){return e===!0||e===!1||CS(e)&&Oy(e)=="[object Boolean]"}function My(e){return typeof e=="object"}function CS(e){return My(e)&&e!==null}function mt(e){return e!=null}function Ul(e){return!e.trim().length}function Oy(e){return e==null?e===void 0?"[object Undefined]":"[object Null]":Object.prototype.toString.call(e)}const PS="Incorrect 'index' type",AS=e=>`Invalid value for key ${e}`,IS=e=>`Pattern length exceeds max of ${e}.`,RS=e=>`Missing ${e} property in key`,ES=e=>`Property 'weight' in key '${e}' must be a positive integer`,Yf=Object.prototype.hasOwnProperty;class jS{constructor(t){this._keys=[],this._keyMap={};let a=0;t.forEach(i=>{let s=By(i);a+=s.weight,this._keys.push(s),this._keyMap[s.id]=s,a+=s.weight}),this._keys.forEach(i=>{i.weight/=a})}get(t){return this._keyMap[t]}keys(){return this._keys}toJSON(){return JSON.stringify(this._keys)}}function By(e){let t=null,a=null,i=null,s=1,r=null;if(Zt(e)||_a(e))i=e,t=Zf(e),a=sd(e);else{if(!Yf.call(e,"name"))throw new Error(RS("name"));const n=e.name;if(i=n,Yf.call(e,"weight")&&(s=e.weight,s<=0))throw new Error(ES(n));t=Zf(n),a=sd(n),r=e.getFn}return{path:t,id:a,weight:s,src:i,getFn:r}}function Zf(e){return _a(e)?e:e.split(".")}function sd(e){return _a(e)?e.join("."):e}function FS(e,t){let a=[],i=!1;const s=(r,n,o)=>{if(mt(r))if(!n[o])a.push(r);else{let l=n[o];const u=r[l];if(!mt(u))return;if(o===n.length-1&&(Zt(u)||Ly(u)||SS(u)))a.push(kS(u));else if(_a(u)){i=!0;for(let d=0,f=u.length;de.score===t.score?e.idx{this._keysMap[a.id]=i})}create(){this.isCreated||!this.docs.length||(this.isCreated=!0,Zt(this.docs[0])?this.docs.forEach((t,a)=>{this._addString(t,a)}):this.docs.forEach((t,a)=>{this._addObject(t,a)}),this.norm.clear())}add(t){const a=this.size();Zt(t)?this._addString(t,a):this._addObject(t,a)}removeAt(t){this.records.splice(t,1);for(let 
a=t,i=this.size();a{let n=s.getFn?s.getFn(t):this.getFn(t,s.path);if(mt(n)){if(_a(n)){let o=[];const l=[{nestedArrIndex:-1,value:n}];for(;l.length;){const{nestedArrIndex:u,value:d}=l.pop();if(mt(d))if(Zt(d)&&!Ul(d)){let f={v:d,i:u,n:this.norm.get(d)};o.push(f)}else _a(d)&&d.forEach((f,g)=>{l.push({nestedArrIndex:g,value:f})})}i.$[r]=o}else if(Zt(n)&&!Ul(n)){let o={v:n,n:this.norm.get(n)};i.$[r]=o}}}),this.records.push(i)}toJSON(){return{keys:this.keys,records:this.records}}}function zy(e,t,{getFn:a=H.getFn,fieldNormWeight:i=H.fieldNormWeight}={}){const s=new Mc({getFn:a,fieldNormWeight:i});return s.setKeys(e.map(By)),s.setSources(t),s.create(),s}function $S(e,{getFn:t=H.getFn,fieldNormWeight:a=H.fieldNormWeight}={}){const{keys:i,records:s}=e,r=new Mc({getFn:t,fieldNormWeight:a});return r.setKeys(i),r.setIndexRecords(s),r}function tn(e,{errors:t=0,currentLocation:a=0,expectedLocation:i=0,distance:s=H.distance,ignoreLocation:r=H.ignoreLocation}={}){const n=t/e.length;if(r)return n;const o=Math.abs(i-a);return s?n+o/s:o?1:n}function WS(e=[],t=H.minMatchCharLength){let a=[],i=-1,s=-1,r=0;for(let n=e.length;r=t&&a.push([i,s]),i=-1)}return e[r-1]&&r-i>=t&&a.push([i,r-1]),a}const ni=32;function US(e,t,a,{location:i=H.location,distance:s=H.distance,threshold:r=H.threshold,findAllMatches:n=H.findAllMatches,minMatchCharLength:o=H.minMatchCharLength,includeMatches:l=H.includeMatches,ignoreLocation:u=H.ignoreLocation}={}){if(t.length>ni)throw new Error(IS(ni));const d=t.length,f=e.length,g=Math.max(0,Math.min(i,f));let c=r,m=g;const p=o>1||l,q=p?Array(f):[];let y;for(;(y=e.indexOf(t,m))>-1;){let T=tn(t,{currentLocation:y,expectedLocation:g,distance:s,ignoreLocation:u});if(c=Math.min(T,c),m=y+d,p){let S=0;for(;S=P;R-=1){let F=R-1,M=a[e.charAt(F)];if(p&&(q[F]=+!!M),O[R]=(O[R+1]<<1|1)&M,T&&(O[R]|=(h[R+1]|h[R])<<1|1|h[R+1]),O[R]&x&&(_=tn(t,{errors:T,currentLocation:F,expectedLocation:g,distance:s,ignoreLocation:u}),_<=c)){if(c=_,m=F,m<=g)break;P=Math.max(1,2*g-m)}}if(tn(t,{errors:T+1,currentLocation:g,expectedLocation:g,distance:s,ignoreLocation:u})>c)break;h=O}const k={isMatch:m>=0,score:Math.max(.001,_)};if(p){const T=WS(q,o);T.length?l&&(k.indices=T):k.isMatch=!1}return k}function HS(e){let t={};for(let a=0,i=e.length;a{this.chunks.push({pattern:g,alphabet:HS(g),startIndex:c})},f=this.pattern.length;if(f>ni){let g=0;const c=f%ni,m=f-c;for(;g{const{isMatch:y,score:h,indices:_}=US(t,m,p,{location:s+q,distance:r,threshold:n,findAllMatches:o,minMatchCharLength:l,includeMatches:i,ignoreLocation:u});y&&(g=!0),f+=h,y&&_&&(d=[...d,..._])});let c={isMatch:g,score:g?f/this.chunks.length:1};return g&&i&&(c.indices=d),c}}class Ja{constructor(t){this.pattern=t}static isMultiMatch(t){return em(t,this.multiRegex)}static isSingleMatch(t){return em(t,this.singleRegex)}search(){}}function em(e,t){const a=e.match(t);return a?a[1]:null}class VS extends Ja{constructor(t){super(t)}static get type(){return"exact"}static get multiRegex(){return/^="(.*)"$/}static get singleRegex(){return/^=(.*)$/}search(t){const a=t===this.pattern;return{isMatch:a,score:a?0:1,indices:[0,this.pattern.length-1]}}}class KS extends Ja{constructor(t){super(t)}static get type(){return"inverse-exact"}static get multiRegex(){return/^!"(.*)"$/}static get singleRegex(){return/^!(.*)$/}search(t){const i=t.indexOf(this.pattern)===-1;return{isMatch:i,score:i?0:1,indices:[0,t.length-1]}}}class GS extends Ja{constructor(t){super(t)}static get type(){return"prefix-exact"}static get multiRegex(){return/^\^"(.*)"$/}static get 
singleRegex(){return/^\^(.*)$/}search(t){const a=t.startsWith(this.pattern);return{isMatch:a,score:a?0:1,indices:[0,this.pattern.length-1]}}}class XS extends Ja{constructor(t){super(t)}static get type(){return"inverse-prefix-exact"}static get multiRegex(){return/^!\^"(.*)"$/}static get singleRegex(){return/^!\^(.*)$/}search(t){const a=!t.startsWith(this.pattern);return{isMatch:a,score:a?0:1,indices:[0,t.length-1]}}}class QS extends Ja{constructor(t){super(t)}static get type(){return"suffix-exact"}static get multiRegex(){return/^"(.*)"\$$/}static get singleRegex(){return/^(.*)\$$/}search(t){const a=t.endsWith(this.pattern);return{isMatch:a,score:a?0:1,indices:[t.length-this.pattern.length,t.length-1]}}}class JS extends Ja{constructor(t){super(t)}static get type(){return"inverse-suffix-exact"}static get multiRegex(){return/^!"(.*)"\$$/}static get singleRegex(){return/^!(.*)\$$/}search(t){const a=!t.endsWith(this.pattern);return{isMatch:a,score:a?0:1,indices:[0,t.length-1]}}}class Wy extends Ja{constructor(t,{location:a=H.location,threshold:i=H.threshold,distance:s=H.distance,includeMatches:r=H.includeMatches,findAllMatches:n=H.findAllMatches,minMatchCharLength:o=H.minMatchCharLength,isCaseSensitive:l=H.isCaseSensitive,ignoreLocation:u=H.ignoreLocation}={}){super(t),this._bitapSearch=new $y(t,{location:a,threshold:i,distance:s,includeMatches:r,findAllMatches:n,minMatchCharLength:o,isCaseSensitive:l,ignoreLocation:u})}static get type(){return"fuzzy"}static get multiRegex(){return/^"(.*)"$/}static get singleRegex(){return/^(.*)$/}search(t){return this._bitapSearch.searchIn(t)}}class Uy extends Ja{constructor(t){super(t)}static get type(){return"include"}static get multiRegex(){return/^'"(.*)"$/}static get singleRegex(){return/^'(.*)$/}search(t){let a=0,i;const s=[],r=this.pattern.length;for(;(i=t.indexOf(this.pattern,a))>-1;)a=i+r,s.push([i,a-1]);const n=!!s.length;return{isMatch:n,score:n?0:1,indices:s}}}const rd=[VS,Uy,GS,XS,JS,QS,KS,Wy],tm=rd.length,YS=/ +(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)/,ZS="|";function eC(e,t={}){return e.split(ZS).map(a=>{let i=a.trim().split(YS).filter(r=>r&&!!r.trim()),s=[];for(let r=0,n=i.length;r!!(e[to.AND]||e[to.OR]),sC=e=>!!e[ld.PATH],rC=e=>!_a(e)&&My(e)&&!ud(e),am=e=>({[to.AND]:Object.keys(e).map(t=>({[t]:e[t]}))});function Hy(e,t,{auto:a=!0}={}){const i=s=>{let r=Object.keys(s);const n=sC(s);if(!n&&r.length>1&&!ud(s))return i(am(s));if(rC(s)){const l=n?s[ld.PATH]:r[0],u=n?s[ld.PATTERN]:s[l];if(!Zt(u))throw new Error(AS(l));const d={keyId:sd(l),pattern:u};return a&&(d.searcher=od(u,t)),d}let o={children:[],operator:r[0]};return r.forEach(l=>{const u=s[l];_a(u)&&u.forEach(d=>{o.children.push(i(d))})}),o};return ud(e)||(e=am(e)),i(e)}function nC(e,{ignoreFieldNorm:t=H.ignoreFieldNorm}){e.forEach(a=>{let i=1;a.matches.forEach(({key:s,norm:r,score:n})=>{const o=s?s.weight:null;i*=Math.pow(n===0&&o?Number.EPSILON:n,(o||1)*(t?1:r))}),a.score=i})}function oC(e,t){const a=e.matches;t.matches=[],mt(a)&&a.forEach(i=>{if(!mt(i.indices)||!i.indices.length)return;const{indices:s,value:r}=i;let n={indices:s,value:r};i.key&&(n.key=i.key.src),i.idx>-1&&(n.refIndex=i.idx),t.matches.push(n)})}function lC(e,t){t.score=e.score}function uC(e,t,{includeMatches:a=H.includeMatches,includeScore:i=H.includeScore}={}){const s=[];return a&&s.push(oC),i&&s.push(lC),e.map(r=>{const{idx:n}=r,o={item:t[n],refIndex:n};return s.length&&s.forEach(l=>{l(r,o)}),o})}class _s{constructor(t,a={},i){this.options={...H,...a},this.options.useExtendedSearch,this._keyStore=new 
jS(this.options.keys),this.setCollection(t,i)}setCollection(t,a){if(this._docs=t,a&&!(a instanceof Mc))throw new Error(PS);this._myIndex=a||zy(this.options.keys,this._docs,{getFn:this.options.getFn,fieldNormWeight:this.options.fieldNormWeight})}add(t){mt(t)&&(this._docs.push(t),this._myIndex.add(t))}remove(t=()=>!1){const a=[];for(let i=0,s=this._docs.length;i-1&&(l=l.slice(0,a)),uC(l,this._docs,{includeMatches:i,includeScore:s})}_searchStringList(t){const a=od(t,this.options),{records:i}=this._myIndex,s=[];return i.forEach(({v:r,i:n,n:o})=>{if(!mt(r))return;const{isMatch:l,score:u,indices:d}=a.searchIn(r);l&&s.push({item:r,idx:n,matches:[{score:u,value:r,norm:o,indices:d}]})}),s}_searchLogical(t){const a=Hy(t,this.options),i=(o,l,u)=>{if(!o.children){const{keyId:f,searcher:g}=o,c=this._findMatches({key:this._keyStore.get(f),value:this._myIndex.getValueForItemAtKeyId(l,f),searcher:g});return c&&c.length?[{idx:u,item:l,matches:c}]:[]}const d=[];for(let f=0,g=o.children.length;f{if(mt(o)){let u=i(a,o,l);u.length&&(r[l]||(r[l]={idx:l,item:o,matches:[]},n.push(r[l])),u.forEach(({matches:d})=>{r[l].matches.push(...d)}))}}),n}_searchObjectList(t){const a=od(t,this.options),{keys:i,records:s}=this._myIndex,r=[];return s.forEach(({$:n,i:o})=>{if(!mt(n))return;let l=[];i.forEach((u,d)=>{l.push(...this._findMatches({key:u,value:n[d],searcher:a}))}),l.length&&r.push({idx:o,item:n,matches:l})}),r}_findMatches({key:t,value:a,searcher:i}){if(!mt(a))return[];let s=[];if(_a(a))a.forEach(({v:r,i:n,n:o})=>{if(!mt(r))return;const{isMatch:l,score:u,indices:d}=i.searchIn(r);l&&s.push({score:u,key:t,value:r,idx:n,norm:o,indices:d})});else{const{v:r,n}=a,{isMatch:o,score:l,indices:u}=i.searchIn(r);o&&s.push({score:l,key:t,value:r,norm:n,indices:u})}return s}}_s.version="6.6.2";_s.createIndex=zy;_s.parseIndex=$S;_s.config=H;_s.parseQuery=Hy;iC(aC);const dC=Di.globalsummary,Ti=Di.plugins,im=Di.status_dict,cC=Object.keys(Ti).length,pC="/aiida-registry/pr-preview/pr-254/",Vy=b.createContext(),Ky=()=>b.useContext(Vy),fC=({children:e})=>{const[t,a]=b.useState(""),[i,s]=b.useState(Ti);return v.jsx(Vy.Provider,{value:{searchQuery:t,setSearchQuery:a,sortedData:i,setSortedData:s},children:e})};function mC(e){const t=[],a=JSON.parse(JSON.stringify(e));return Object.entries(a).forEach(([i,s])=>{s.entry_points=JSON.stringify(s.entry_points),t.push(s)}),t}const hC=mC(Ti);function gC(){const{searchQuery:e,setSearchQuery:t,sortedData:a,setSortedData:i}=Ky(),s=d=>{t(d),d==""&&i(Ti)};let n=new _s(hC,{keys:["name","metadata.description","entry_point_prefix","metadata.author","entry_points"],includeScore:!0,ignoreLocation:!0,threshold:.1}).search(e);const o=n.map(d=>d.item.name),l={};n.forEach(d=>{l[d.item.name]=Ti[d.item.name]});const u=d=>{d.preventDefault(),i(l)};return v.jsxs(v.Fragment,{children:[v.jsx("div",{className:"search",children:v.jsxs("form",{className:"search-form",children:[v.jsx("input",{type:"text",placeholder:"Search for plugins",value:e,label:"search",onChange:d=>s(d.target.value)}),v.jsx("button",{style:{fontSize:"20px"},onClick:d=>{u(d)},children:v.jsx(Ny,{})})]})}),v.jsx("ul",{className:"suggestions-list",children:o.map(d=>v.jsx(Xn,{to:`/${d}`,children:v.jsx("li",{className:"suggestion-item",children:d},d)}))})]})}function yC(){const{searchQuery:e,setSearchQuery:t,sortedData:a,setSortedData:i}=Ky(),[s,r]=b.useState("alpha");document.documentElement.style.scrollBehavior="auto";function n(){var l=window.scrollY;window.onscroll=function(){var 
u=window.scrollY;l>u?document.querySelector("header").style.top="0":l>150&&(document.querySelector("header").style.top="-155px"),l=u}}n();const o=l=>{r(l);let u;if(l==="commits"){const d=Object.entries(Ti);d.sort(([,f],[,g])=>g.commits_count-f.commits_count),u=Object.fromEntries(d)}else l=="alpha"&&(u=Ti);i(u)};return v.jsxs("main",{className:"fade-enter",children:[v.jsxs("h2",{children:["Registered plugin packages: ",cC]}),v.jsx("div",{className:"globalsummary-box",children:v.jsx("div",{style:{display:"table"},children:dC.map(l=>v.jsxs("span",{className:"badge",style:{display:"table-row",lineHeight:2},children:[v.jsx("span",{style:{display:"table-cell",float:"none",textAlign:"right"},children:v.jsxs("span",{className:`badge-left ${l.colorclass} tooltip`,style:{float:"none",display:"inline",textAlign:"right",border:"none"},children:[l.name,l.tooltip&&v.jsx("span",{className:"tooltiptext",children:l.tooltip})]})}),v.jsx("span",{style:{display:"table-cell",float:"none",textAlign:"left"},children:v.jsxs("span",{className:"badge-right",style:{float:"none",display:"inline",textAlign:"left",border:"none"},children:[l.total_num," plugin",l.total_num!==1?"s":""," in ",l.num_entries," package",l.num_entries!==1?"s":""]})})]},l.name))})}),v.jsxs("div",{id:"entrylist",children:[v.jsxs("div",{style:{display:"flex",flexDirection:"row",alignItems:"center"},children:[v.jsx("h1",{style:{minHeight:"50px",padding:"15px 8px",display:"flex",flexDirection:"column"},children:"Package list"}),v.jsx("div",{style:{minHeight:"50px",padding:"15px 8px",borderRadius:"0 0 0 0",flex:"1"},children:v.jsx(gC,{})}),v.jsx(bx,{style:{minHeight:"50px",minWidth:"600px",padding:"15px 8px",display:"flex"},children:v.jsxs(Iq,{style:{width:"25%"},children:[v.jsx(Fx,{id:"demo-simple-select-label",children:"Sort"}),v.jsxs(pS,{value:s,label:"Sort",onChange:l=>o(l.target.value),children:[v.jsx(Lf,{value:"alpha",children:"Alphabetical"}),v.jsx(Lf,{value:"commits",children:"Commits Count"})]})]})})]}),Object.entries(a).map(([l,u])=>v.jsxs("div",{className:"submenu-entry",children:[v.jsx(Xn,{to:`/${l}`,children:v.jsxs("h2",{style:{display:"inline"},children:[l," "]})}),u.is_installable==="True"&&v.jsxs("div",{className:"classbox",style:{backgroundColor:"transparent"},children:[v.jsx(Fy,{style:{color:"green",marginBottom:"-5"}}),v.jsx("span",{className:"tooltiptext",children:"Plugin successfully installed"})]}),v.jsxs("p",{className:"currentstate",children:[v.jsx("img",{className:"svg-badge",src:`${pC}${im[u.development_status][1]}`,title:im[u.development_status][0]})," ",u.aiida_version&&v.jsx("img",{className:"svg-badge",title:`Compatible with aiida-core ${u.aiida_version}`,src:`https://img.shields.io/badge/AiiDA-${u.aiida_version}-007ec6.svg?logo=${Og}`}),s==="commits"&&v.jsx("img",{className:"svg-badge",style:{padding:"3px"},src:`https://img.shields.io/badge/Yearly%20Commits-${u.commits_count}-007ec6.svg`})]}),v.jsx("p",{children:u.metadata.description}),v.jsxs("ul",{className:"plugin-info",children:[v.jsx("li",{children:v.jsx("a",{href:u.code_home,children:"Source Code"})}),u.documentation_url&&v.jsx("li",{children:v.jsx("a",{href:u.documentation_url,children:"Documentation"})}),v.jsx("li",{children:v.jsx(Xn,{to:`/${l}`,children:"Plugin details"})})]}),u.summaryinfo&&v.jsx(v.Fragment,{children:v.jsx("p",{className:"summaryinfo",children:u.summaryinfo.map(d=>v.jsxs("span",{className:"badge",children:[v.jsx("span",{className:`badge-left 
${d.colorclass}`,children:d.text}),v.jsx("span",{className:"badge-right",children:d.count})]},d.text))})})]},l))]})]})}function oi(){return oi=Object.assign?Object.assign.bind():function(e){for(var t=1;t(e[t.toLowerCase()]=t,e),{for:"htmlFor"}),rm={amp:"&",apos:"'",gt:">",lt:"<",nbsp:" ",quot:"“"},vC=["style","script"],bC=/([-A-Z0-9_:]+)(?:\s*=\s*(?:(?:"((?:\\.|[^"])*)")|(?:'((?:\\.|[^'])*)')|(?:\{((?:\\.|{[^}]*?}|[^}])*)\})))?/gi,wC=/mailto:/i,TC=/\n{2,}$/,Gy=/^( *>[^\n]+(\n[^\n]+)*\n*)+\n{2,}/,xC=/^ *> ?/gm,qC=/^ {2,}\n/,DC=/^(?:( *[-*_])){3,} *(?:\n *)+\n/,Xy=/^\s*(`{3,}|~{3,}) *(\S+)?([^\n]*?)?\n([\s\S]+?)\s*\1 *(?:\n *)*\n?/,Qy=/^(?: {4}[^\n]+\n*)+(?:\n *)+\n?/,kC=/^(`+)\s*([\s\S]*?[^`])\s*\1(?!`)/,SC=/^(?:\n *)*\n/,CC=/\r\n?/g,PC=/^\[\^([^\]]+)](:.*)\n/,AC=/^\[\^([^\]]+)]/,IC=/\f/g,RC=/^\s*?\[(x|\s)\]/,Jy=/^ *(#{1,6}) *([^\n]+?)(?: +#*)?(?:\n *)*(?:\n|$)/,Yy=/^ *(#{1,6}) +([^\n]+?)(?: +#*)?(?:\n *)*(?:\n|$)/,Zy=/^([^\n]+)\n *(=|-){3,} *(?:\n *)+\n/,dd=/^ *(?!<[a-z][^ >/]* ?\/>)<([a-z][^ >/]*) ?([^>]*)\/{0}>\n?(\s*(?:<\1[^>]*?>[\s\S]*?<\/\1>|(?!<\1)[\s\S])*?)<\/\1>\n*/i,EC=/&([a-z0-9]+|#[0-9]{1,6}|#x[0-9a-fA-F]{1,6});/gi,e_=/^)/,jC=/^(data|aria|x)-[a-z_][a-z\d_.-]*$/,cd=/^ *<([a-z][a-z0-9:]*)(?:\s+((?:<.*?>|[^>])*))?\/?>(?!<\/\1>)(\s*\n)?/i,FC=/^\{.*\}$/,NC=/^(https?:\/\/[^\s<]+[^<.,:;"')\]\s])/,LC=/^<([^ >]+@[^ >]+)>/,MC=/^<([^ >]+:\/[^ >]+)>/,OC=/-([a-z])?/gi,t_=/^(.*\|?.*)\n *(\|? *[-:]+ *\|[-| :]*)\n((?:.*\|.*\n)*)\n?/,BC=/^\[([^\]]*)\]:\s+]+)>?\s*("([^"]*)")?/,zC=/^!\[([^\]]*)\] ?\[([^\]]*)\]/,$C=/^\[([^\]]*)\] ?\[([^\]]*)\]/,WC=/(\[|\])/g,UC=/(\n|^[-*]\s|^#|^ {2,}|^-{2,}|^>\s)/,HC=/\t/g,VC=/^ *\| */,KC=/(^ *\||\| *$)/g,GC=/ *$/,XC=/^ *:-+: *$/,QC=/^ *:-+ *$/,JC=/^ *-+: *$/,YC=/^([*_])\1((?:\[.*?\][([].*?[)\]]|<.*?>(?:.*?<.*?>)?|`.*?`|~+.*?~+|.)*?)\1\1(?!\1)/,ZC=/^([*_])((?:\[.*?\][([].*?[)\]]|<.*?>(?:.*?<.*?>)?|`.*?`|~+.*?~+|.)*?)\1(?!\1|\w)/,eP=/^==((?:\[.*?\]|<.*?>(?:.*?<.*?>)?|`.*?`|.)*?)==/,tP=/^~~((?:\[.*?\]|<.*?>(?:.*?<.*?>)?|`.*?`|.)*?)~~/,aP=/^\\([^0-9A-Za-z\s])/,iP=/^[\s\S]+?(?=[^0-9A-Z\s\u00c0-\uffff&#;.()'"]|\d+\.|\n\n| {2,}\n|\w+:\S|$)/i,sP=/^\n+/,rP=/^([ \t]*)/,nP=/\\([^\\])/g,nm=/ *\n+$/,oP=/(?:^|\n)( *)$/,Oc="(?:\\d+\\.)",Bc="(?:[*+-])";function a_(e){return"( *)("+(e===1?Oc:Bc)+") +"}const i_=a_(1),s_=a_(2);function r_(e){return new RegExp("^"+(e===1?i_:s_))}const lP=r_(1),uP=r_(2);function n_(e){return new RegExp("^"+(e===1?i_:s_)+"[^\\n]*(?:\\n(?!\\1"+(e===1?Oc:Bc)+" )[^\\n]*)*(\\n|$)","gm")}const o_=n_(1),l_=n_(2);function u_(e){const t=e===1?Oc:Bc;return new RegExp("^( *)("+t+") [\\s\\S]+?(?:\\n{2,}(?! 
)(?!\\1"+t+" (?!"+t+" ))\\n*|\\s*\\n*$)")}const d_=u_(1),c_=u_(2);function om(e,t){const a=t===1,i=a?d_:c_,s=a?o_:l_,r=a?lP:uP;return{t(n,o,l){const u=oP.exec(l);return u&&(o.o||!o._&&!o.u)?i.exec(n=u[1]+n):null},i:Y.HIGH,l(n,o,l){const u=a?+n[2]:void 0,d=n[0].replace(TC,` +`).match(s);let f=!1;return{p:d.map(function(g,c){const m=r.exec(g)[0].length,p=new RegExp("^ {1,"+m+"}","gm"),q=g.replace(p,"").replace(r,""),y=c===d.length-1,h=q.indexOf(` + +`)!==-1||y&&f;f=h;const _=l._,w=l.o;let x;l.o=!0,h?(l._=!1,x=q.replace(nm,` + +`)):(l._=!0,x=q.replace(nm,""));const k=o(x,l);return l._=_,l.o=w,k}),m:a,g:u}},h:(n,o,l)=>e(n.m?"ol":"ul",{key:l.k,start:n.g},n.p.map(function(u,d){return e("li",{key:d},o(u,l))}))}}const dP=/^\[([^\]]*)]\( *((?:\([^)]*\)|[^() ])*) *"?([^)"]*)?"?\)/,cP=/^!\[([^\]]*)]\( *((?:\([^)]*\)|[^() ])*) *"?([^)"]*)?"?\)/,p_=[Gy,Xy,Qy,Jy,Zy,Yy,e_,t_,o_,d_,l_,c_],pP=[...p_,/^[^\n]+(?: \n|\n{2,})/,dd,cd];function fP(e){return e.replace(/[ÀÁÂÃÄÅàáâãäåæÆ]/g,"a").replace(/[çÇ]/g,"c").replace(/[ðÐ]/g,"d").replace(/[ÈÉÊËéèêë]/g,"e").replace(/[ÏïÎîÍíÌì]/g,"i").replace(/[Ññ]/g,"n").replace(/[øØœŒÕõÔôÓóÒò]/g,"o").replace(/[ÜüÛûÚúÙù]/g,"u").replace(/[ŸÿÝý]/g,"y").replace(/[^a-z0-9- ]/gi,"").replace(/ /gi,"-").toLowerCase()}function mP(e){return JC.test(e)?"right":XC.test(e)?"center":QC.test(e)?"left":null}function lm(e,t,a){const i=a.$;a.$=!0;const s=t(e.trim(),a);a.$=i;let r=[[]];return s.forEach(function(n,o){n.type==="tableSeparator"?o!==0&&o!==s.length-1&&r.push([]):(n.type!=="text"||s[o+1]!=null&&s[o+1].type!=="tableSeparator"||(n.v=n.v.replace(GC,"")),r[r.length-1].push(n))}),r}function hP(e,t,a){a._=!0;const i=lm(e[1],t,a),s=e[2].replace(KC,"").split("|").map(mP),r=function(n,o,l){return n.trim().split(` +`).map(function(u){return lm(u,o,l)})}(e[3],t,a);return a._=!1,{S:s,A:r,L:i,type:"table"}}function um(e,t){return e.S[t]==null?{}:{textAlign:e.S[t]}}function Da(e){return function(t,a){return a._?e.exec(t):null}}function ka(e){return function(t,a){return a._||a.u?e.exec(t):null}}function oa(e){return function(t,a){return a._||a.u?null:e.exec(t)}}function js(e){return function(t){return e.exec(t)}}function gP(e,t,a){if(t._||t.u||a&&!a.endsWith(` +`))return null;let i="";e.split(` +`).every(r=>!p_.some(n=>n.test(r))&&(i+=r+` +`,r.trim()));const s=i.trimEnd();return s==""?null:[i,s]}function Ei(e){try{if(decodeURIComponent(e).replace(/[^A-Za-z0-9/:]/g,"").match(/^\s*(javascript|vbscript|data(?!:image)):/i))return}catch{return null}return e}function dm(e){return e.replace(nP,"$1")}function bn(e,t,a){const i=a._||!1,s=a.u||!1;a._=!0,a.u=!0;const r=e(t,a);return a._=i,a.u=s,r}function yP(e,t,a){const i=a._||!1,s=a.u||!1;a._=!1,a.u=!0;const r=e(t,a);return a._=i,a.u=s,r}function _P(e,t,a){return a._=!1,e(t,a)}const Hl=(e,t,a)=>({v:bn(t,e[1],a)});function Vl(){return{}}function Kl(){return null}function vP(...e){return e.filter(Boolean).join(" ")}function Gl(e,t,a){let i=e;const s=t.split(".");for(;s.length&&(i=i[s[0]],i!==void 0);)s.shift();return i||a}var Y;function bP(e,t={}){t.overrides=t.overrides||{},t.slugify=t.slugify||fP,t.namedCodesToUnicode=t.namedCodesToUnicode?oi({},rm,t.namedCodesToUnicode):rm;const a=t.createElement||b.createElement;function i(c,m,...p){const q=Gl(t.overrides,`${c}.props`,{});return a(function(y,h){const _=Gl(h,y);return _?typeof _=="function"||typeof _=="object"&&"render"in _?_:Gl(h,`${y}.component`,y):y}(c,t.overrides),oi({},m,q,{className:vP(m==null?void 0:m.className,q.className)||void 0}),...p)}function s(c){let 
m=!1;t.forceInline?m=!0:t.forceBlock||(m=UC.test(c)===!1);const p=d(u(m?c:`${c.trimEnd().replace(sP,"")} + +`,{_:m}));for(;typeof p[p.length-1]=="string"&&!p[p.length-1].trim();)p.pop();if(t.wrapper===null)return p;const q=t.wrapper||(m?"span":"div");let y;if(p.length>1||t.forceWrapper)y=p;else{if(p.length===1)return y=p[0],typeof y=="string"?i("span",{key:"outer"},y):y;y=null}return b.createElement(q,{key:"outer"},y)}function r(c){const m=c.match(bC);return m?m.reduce(function(p,q,y){const h=q.indexOf("=");if(h!==-1){const _=function(T){return T.indexOf("-")!==-1&&T.match(jC)===null&&(T=T.replace(OC,function(S,A){return A.toUpperCase()})),T}(q.slice(0,h)).trim(),w=function(T){const S=T[0];return(S==='"'||S==="'")&&T.length>=2&&T[T.length-1]===S?T.slice(1,-1):T}(q.slice(h+1).trim()),x=sm[_]||_,k=p[x]=function(T,S){return T==="style"?S.split(/;\s?/).reduce(function(A,P){const j=P.slice(0,P.indexOf(":"));return A[j.replace(/(-[a-z])/g,O=>O[1].toUpperCase())]=P.slice(j.length+1).trim(),A},{}):T==="href"?Ei(S):(S.match(FC)&&(S=S.slice(1,S.length-1)),S==="true"||S!=="false"&&S)}(_,w);typeof k=="string"&&(dd.test(k)||cd.test(k))&&(p[x]=b.cloneElement(s(k.trim()),{key:y}))}else q!=="style"&&(p[sm[q]||q]=!0);return p},{}):null}const n=[],o={},l={blockQuote:{t:oa(Gy),i:Y.HIGH,l:(c,m,p)=>({v:m(c[0].replace(xC,""),p)}),h:(c,m,p)=>i("blockquote",{key:p.k},m(c.v,p))},breakLine:{t:js(qC),i:Y.HIGH,l:Vl,h:(c,m,p)=>i("br",{key:p.k})},breakThematic:{t:oa(DC),i:Y.HIGH,l:Vl,h:(c,m,p)=>i("hr",{key:p.k})},codeBlock:{t:oa(Qy),i:Y.MAX,l:c=>({v:c[0].replace(/^ {4}/gm,"").replace(/\n+$/,""),M:void 0}),h:(c,m,p)=>i("pre",{key:p.k},i("code",oi({},c.O,{className:c.M?`lang-${c.M}`:""}),c.v))},codeFenced:{t:oa(Xy),i:Y.MAX,l:c=>({O:r(c[3]||""),v:c[4],M:c[2]||void 0,type:"codeBlock"})},codeInline:{t:ka(kC),i:Y.LOW,l:c=>({v:c[2]}),h:(c,m,p)=>i("code",{key:p.k},c.v)},footnote:{t:oa(PC),i:Y.MAX,l:c=>(n.push({I:c[2],j:c[1]}),{}),h:Kl},footnoteReference:{t:Da(AC),i:Y.HIGH,l:c=>({v:c[1],B:`#${t.slugify(c[1])}`}),h:(c,m,p)=>i("a",{key:p.k,href:Ei(c.B)},i("sup",{key:p.k},c.v))},gfmTask:{t:Da(RC),i:Y.HIGH,l:c=>({R:c[1].toLowerCase()==="x"}),h:(c,m,p)=>i("input",{checked:c.R,key:p.k,readOnly:!0,type:"checkbox"})},heading:{t:oa(t.enforceAtxHeadings?Yy:Jy),i:Y.HIGH,l:(c,m,p)=>({v:bn(m,c[2],p),T:t.slugify(c[2]),C:c[1].length}),h:(c,m,p)=>i(`h${c.C}`,{id:c.T,key:p.k},m(c.v,p))},headingSetext:{t:oa(Zy),i:Y.MAX,l:(c,m,p)=>({v:bn(m,c[1],p),C:c[2]==="="?1:2,type:"heading"})},htmlComment:{t:js(e_),i:Y.HIGH,l:()=>({}),h:Kl},image:{t:ka(cP),i:Y.HIGH,l:c=>({D:c[1],B:dm(c[2]),F:c[3]}),h:(c,m,p)=>i("img",{key:p.k,alt:c.D||void 0,title:c.F||void 0,src:Ei(c.B)})},link:{t:Da(dP),i:Y.LOW,l:(c,m,p)=>({v:yP(m,c[1],p),B:dm(c[2]),F:c[3]}),h:(c,m,p)=>i("a",{key:p.k,href:Ei(c.B),title:c.F},m(c.v,p))},linkAngleBraceStyleDetector:{t:Da(MC),i:Y.MAX,l:c=>({v:[{v:c[1],type:"text"}],B:c[1],type:"link"})},linkBareUrlDetector:{t:(c,m)=>m.N?null:Da(NC)(c,m),i:Y.MAX,l:c=>({v:[{v:c[1],type:"text"}],B:c[1],F:void 0,type:"link"})},linkMailtoDetector:{t:Da(LC),i:Y.MAX,l(c){let m=c[1],p=c[1];return wC.test(p)||(p="mailto:"+p),{v:[{v:m.replace("mailto:",""),type:"text"}],B:p,type:"link"}}},orderedList:om(i,1),unorderedList:om(i,2),newlineCoalescer:{t:oa(SC),i:Y.LOW,l:Vl,h:()=>` +`},paragraph:{t:gP,i:Y.LOW,l:Hl,h:(c,m,p)=>i("p",{key:p.k},m(c.v,p))},ref:{t:Da(BC),i:Y.MAX,l:c=>(o[c[1]]={B:c[2],F:c[4]},{}),h:Kl},refImage:{t:ka(zC),i:Y.MAX,l:c=>({D:c[1]||void 
0,P:c[2]}),h:(c,m,p)=>i("img",{key:p.k,alt:c.D,src:Ei(o[c.P].B),title:o[c.P].F})},refLink:{t:Da($C),i:Y.MAX,l:(c,m,p)=>({v:m(c[1],p),Z:m(c[0].replace(WC,"\\$1"),p),P:c[2]}),h:(c,m,p)=>o[c.P]?i("a",{key:p.k,href:Ei(o[c.P].B),title:o[c.P].F},m(c.v,p)):i("span",{key:p.k},m(c.Z,p))},table:{t:oa(t_),i:Y.HIGH,l:hP,h:(c,m,p)=>i("table",{key:p.k},i("thead",null,i("tr",null,c.L.map(function(q,y){return i("th",{key:y,style:um(c,y)},m(q,p))}))),i("tbody",null,c.A.map(function(q,y){return i("tr",{key:y},q.map(function(h,_){return i("td",{key:_,style:um(c,_)},m(h,p))}))})))},tableSeparator:{t:function(c,m){return m.$?(m._=!0,VC.exec(c)):null},i:Y.HIGH,l:function(){return{type:"tableSeparator"}},h:()=>" | "},text:{t:js(iP),i:Y.MIN,l:c=>({v:c[0].replace(EC,(m,p)=>t.namedCodesToUnicode[p]?t.namedCodesToUnicode[p]:m)}),h:c=>c.v},textBolded:{t:ka(YC),i:Y.MED,l:(c,m,p)=>({v:m(c[2],p)}),h:(c,m,p)=>i("strong",{key:p.k},m(c.v,p))},textEmphasized:{t:ka(ZC),i:Y.LOW,l:(c,m,p)=>({v:m(c[2],p)}),h:(c,m,p)=>i("em",{key:p.k},m(c.v,p))},textEscaped:{t:ka(aP),i:Y.HIGH,l:c=>({v:c[1],type:"text"})},textMarked:{t:ka(eP),i:Y.LOW,l:Hl,h:(c,m,p)=>i("mark",{key:p.k},m(c.v,p))},textStrikethroughed:{t:ka(tP),i:Y.LOW,l:Hl,h:(c,m,p)=>i("del",{key:p.k},m(c.v,p))}};t.disableParsingRawHTML!==!0&&(l.htmlBlock={t:js(dd),i:Y.HIGH,l(c,m,p){const[,q]=c[3].match(rP),y=new RegExp(`^${q}`,"gm"),h=c[3].replace(y,""),_=(w=h,pP.some(S=>S.test(w))?_P:bn);var w;const x=c[1].toLowerCase(),k=vC.indexOf(x)!==-1;p.N=p.N||x==="a";const T=k?c[3]:_(m,h,p);return p.N=!1,{O:r(c[2]),v:T,G:k,H:k?x:c[1]}},h:(c,m,p)=>i(c.H,oi({key:p.k},c.O),c.G?c.v:m(c.v,p))},l.htmlSelfClosing={t:js(cd),i:Y.HIGH,l:c=>({O:r(c[2]||""),H:c[1]}),h:(c,m,p)=>i(c.H,oi({},c.O,{key:p.k}))});const u=function(c){let m=Object.keys(c);function p(q,y){let h=[],_="";for(;q;){let w=0;for(;w{let{children:t,options:a}=e,i=function(s,r){if(s==null)return{};var n,o,l={},u=Object.keys(s);for(o=0;o=0||(l[n]=s[n]);return l}(e,_C);return b.cloneElement(bP(t,a),i)},cm=Di.entrypointtypes,wP=Di.plugins,pm=Di.status_dict,TP="/aiida-registry/pr-preview/pr-254/";function xP({pluginKey:e}){const t=wP[e];return window.scrollTo(0,0),document.documentElement.style.scrollBehavior="smooth",v.jsx(v.Fragment,{children:v.jsxs("div",{id:"details",className:"fade-enter",children:[v.jsxs("h1",{className:"plugin-header",children:['AiiDA plugin package "',v.jsx("a",{href:t.code_home,children:t.name}),'"']}),v.jsx(Xn,{to:"/",children:v.jsx("p",{style:{display:"inline"},children:"< back to the registry index"})}),v.jsx("h2",{id:"general.information",children:"General information"}),v.jsxs("div",{children:[v.jsxs("p",{children:[v.jsx("strong",{children:"Current state: "}),v.jsx("img",{className:"svg-badge",src:`${TP}${pm[t.development_status][1]}`,title:pm[t.development_status][0]})]}),t.metadata.description&&v.jsxs("p",{children:[v.jsx("strong",{children:"Short description"}),": ",t.metadata.description]}),t.pip_url&&v.jsxs("p",{children:[v.jsx("strong",{children:"How to install"}),": ",v.jsx("code",{children:t.pip_install_cmd})]}),v.jsxs("p",{children:[v.jsx("strong",{children:"Source code"}),": ",v.jsx("a",{href:t.code_home,target:"_blank",children:"Go to the source code repository"})]}),t.documentation_url?v.jsxs("p",{children:[v.jsx("strong",{children:"Documentation"}),": ",v.jsx("a",{href:t.documentation_url,target:"_blank",children:"Go to plugin documentation"})]}):v.jsxs("p",{children:[v.jsx("strong",{children:"Documentation"}),": No documentation provided by the package 
author"]})]}),v.jsx("h2",{id:"detailed.information",children:"Detailed information"}),Object.keys(t.metadata).length!==0?v.jsxs(v.Fragment,{children:[v.jsxs("p",{children:[v.jsx("strong",{children:"Author(s)"}),": ",t.metadata.author]}),t.metadata.author_email&&v.jsxs("p",{children:[v.jsx("strong",{children:"Contact"}),":"," ",v.jsx("a",{href:`mailto:${t.metadata.author_email}`,children:t.metadata.author_email})]}),v.jsxs("p",{children:[v.jsx("strong",{children:"How to use from python"}),":"," ",v.jsxs("code",{children:["import ",t.package_name]})]}),v.jsxs("p",{children:[v.jsx("strong",{children:"Most recent version"}),": ",t.metadata.version]}),t.aiida_version&&v.jsxs("p",{children:[v.jsx("strong",{children:"Compatibility: "}),v.jsx("img",{className:"svg-badge",src:`https://img.shields.io/badge/AiiDA-${t.aiida_version}-007ec6.svg?logo=${Og}`})]}),t.summaryinfo.length!==0&&v.jsxs(v.Fragment,{children:[v.jsx("h3",{id:"plugins",children:"Plugins provided by the package"}),t.summaryinfo.map(a=>v.jsxs("span",{className:"badge",children:[v.jsx("span",{className:`badge-left ${a.colorclass}`,children:a.text}),v.jsx("span",{className:"badge-right",children:a.count})]},a.text))]}),t.entry_points?Object.entries(t.entry_points).map(([a,i])=>v.jsx(v.Fragment,{children:v.jsxs("div",{children:[v.jsx("h2",{style:{color:"black"},id:a,children:a in cm?v.jsxs(v.Fragment,{children:[cm[a]," ",v.jsxs("span",{className:"entrypointraw",children:["(",a,")"]})]}):a}),v.jsx("ul",{children:Object.entries(i).map(([s,r])=>v.jsxs("li",{children:[v.jsx("h2",{style:{color:"black"},id:`${a}.${s}`,children:s}),typeof r=="string"?v.jsxs("div",{className:"classbox",children:["class",v.jsxs("span",{className:"tooltiptext",children:[" ",r]})]}):v.jsx(qP,{entryPoints:r})]},s))})]},a)})):v.jsx("p",{children:"No entry points defined for this plugin."})]}):v.jsx("div",{id:"description",children:v.jsxs("p",{children:["Detailed information for this package could not be obtained. 
Ask the plugin author to add a ",v.jsx("code",{children:"setup.json"})," file to the plugin source code."]})})]})})}const qP=({entryPoints:e})=>v.jsxs("div",{style:{overflow:"auto"},children:[v.jsx("table",{children:v.jsx("tbody",{children:v.jsxs("tr",{children:[v.jsx("th",{children:"Class"}),v.jsx("td",{children:v.jsx("code",{children:e.class})})]})})}),v.jsxs("table",{children:[v.jsx("tr",{children:v.jsx("th",{children:"Description"})}),e.description.map(t=>v.jsx("tr",{className:"ep_description",children:v.jsx(pd,{children:t.trim()})}))]}),v.jsxs("table",{children:[v.jsxs("tr",{children:[v.jsx("th",{children:"Inputs"}),v.jsx("th",{children:"Required"}),v.jsx("th",{children:"Valid Types"}),v.jsx("th",{children:"Description"})]}),v.jsx(fm,{spec:e.spec.inputs}),v.jsxs("tr",{children:[v.jsx("th",{children:"Outputs"}),v.jsx("th",{children:"Required"}),v.jsx("th",{children:"Valid Types"}),v.jsx("th",{children:"Description"})]}),v.jsx(fm,{spec:e.spec.outputs})]}),v.jsxs("table",{children:[v.jsx("tr",{children:v.jsx("th",{children:"Exit Codes"})}),v.jsxs("tr",{children:[v.jsx("th",{children:"Status"}),v.jsx("th",{children:"Message"})]}),e.spec.exit_codes.map(t=>v.jsxs("tr",{className:"ep_description",children:[v.jsx("td",{children:t.status}),v.jsx(pd,{children:t.message})]}))]})]}),fm=({spec:e})=>v.jsx(v.Fragment,{children:e.map(t=>v.jsxs("tr",{className:"ep_description",children:[v.jsx("td",{children:t.name}),v.jsx("td",{children:t.required.toString()}),v.jsx("td",{children:t.valid_types}),v.jsx(pd,{children:t.info})]}))}),DP=["addEndListener","appear","children","container","direction","easing","in","onEnter","onEntered","onEntering","onExit","onExited","onExiting","style","timeout","TransitionComponent"];function kP(e,t,a){const i=t.getBoundingClientRect(),s=a&&a.getBoundingClientRect(),r=ia(t);let n;if(t.fakeTransform)n=t.fakeTransform;else{const u=r.getComputedStyle(t);n=u.getPropertyValue("-webkit-transform")||u.getPropertyValue("transform")}let o=0,l=0;if(n&&n!=="none"&&typeof n=="string"){const u=n.split("(")[1].split(")")[0].split(",");o=parseInt(u[4],10),l=parseInt(u[5],10)}return e==="left"?s?`translateX(${s.right+o-i.left}px)`:`translateX(${r.innerWidth+o-i.left}px)`:e==="right"?s?`translateX(-${i.right-s.left-o}px)`:`translateX(-${i.left+i.width-o}px)`:e==="up"?s?`translateY(${s.bottom+l-i.top}px)`:`translateY(${r.innerHeight+l-i.top}px)`:s?`translateY(-${i.top-s.top+i.height-l}px)`:`translateY(-${i.top+i.height-l}px)`}function SP(e){return typeof e=="function"?e():e}function an(e,t,a){const i=SP(a),s=kP(e,t,i);s&&(t.style.webkitTransform=s,t.style.transform=s)}const CP=b.forwardRef(function(t,a){const i=Rr(),s={enter:i.transitions.easing.easeOut,exit:i.transitions.easing.sharp},r={enter:i.transitions.duration.enteringScreen,exit:i.transitions.duration.leavingScreen},{addEndListener:n,appear:o=!0,children:l,container:u,direction:d="down",easing:f=s,in:g,onEnter:c,onEntered:m,onEntering:p,onExit:q,onExited:y,onExiting:h,style:_,timeout:w=r,TransitionComponent:x=Pc}=t,k=U(t,DP),T=b.useRef(null),S=Xe(l.ref,T,a),A=C=>I=>{C&&(I===void 0?C(T.current):C(T.current,I))},P=A((C,I)=>{an(d,C,u),Rc(C),c&&c(C,I)}),j=A((C,I)=>{const L=ds({timeout:w,style:_,easing:f},{mode:"enter"});C.style.webkitTransition=i.transitions.create("-webkit-transform",D({},L)),C.style.transition=i.transitions.create("transform",D({},L)),C.style.webkitTransform="none",C.style.transform="none",p&&p(C,I)}),O=A(m),z=A(h),R=A(C=>{const 
I=ds({timeout:w,style:_,easing:f},{mode:"exit"});C.style.webkitTransition=i.transitions.create("-webkit-transform",I),C.style.transition=i.transitions.create("transform",I),an(d,C,u),q&&q(C)}),F=A(C=>{C.style.webkitTransition="",C.style.transition="",y&&y(C)}),M=C=>{n&&n(T.current,C)},N=b.useCallback(()=>{T.current&&an(d,T.current,u)},[d,u]);return b.useEffect(()=>{if(g||d==="down"||d==="right")return;const C=Ro(()=>{T.current&&an(d,T.current,u)}),I=ia(T.current);return I.addEventListener("resize",C),()=>{C.clear(),I.removeEventListener("resize",C)}},[d,g,u]),b.useEffect(()=>{g||N()},[g,N]),v.jsx(x,D({nodeRef:T,onEnter:P,onEntered:O,onEntering:j,onExit:R,onExited:F,onExiting:z,addEndListener:M,appear:o,in:g,timeout:w},k,{children:(C,I)=>b.cloneElement(l,D({ref:S,style:D({visibility:C==="exited"&&!g?"hidden":void 0},_,l.props.style)},I))}))}),PP=CP;function AP(e){return Pe("MuiDrawer",e)}Te("MuiDrawer",["root","docked","paper","paperAnchorLeft","paperAnchorRight","paperAnchorTop","paperAnchorBottom","paperAnchorDockedLeft","paperAnchorDockedRight","paperAnchorDockedTop","paperAnchorDockedBottom","modal"]);const IP=["BackdropProps"],RP=["anchor","BackdropProps","children","className","elevation","hideBackdrop","ModalProps","onClose","open","PaperProps","SlideProps","TransitionComponent","transitionDuration","variant"],f_=(e,t)=>{const{ownerState:a}=e;return[t.root,(a.variant==="permanent"||a.variant==="persistent")&&t.docked,t.modal]},EP=e=>{const{classes:t,anchor:a,variant:i}=e,s={root:["root"],docked:[(i==="permanent"||i==="persistent")&&"docked"],modal:["modal"],paper:["paper",`paperAnchor${be(a)}`,i!=="temporary"&&`paperAnchorDocked${be(a)}`]};return Fe(s,AP,t)},jP=V(wy,{name:"MuiDrawer",slot:"Root",overridesResolver:f_})(({theme:e})=>({zIndex:(e.vars||e).zIndex.drawer})),mm=V("div",{shouldForwardProp:sa,name:"MuiDrawer",slot:"Docked",skipVariantsResolver:!1,overridesResolver:f_})({flex:"0 0 auto"}),FP=V(Ty,{name:"MuiDrawer",slot:"Paper",overridesResolver:(e,t)=>{const{ownerState:a}=e;return[t.paper,t[`paperAnchor${be(a.anchor)}`],a.variant!=="temporary"&&t[`paperAnchorDocked${be(a.anchor)}`]]}})(({theme:e,ownerState:t})=>D({overflowY:"auto",display:"flex",flexDirection:"column",height:"100%",flex:"1 0 auto",zIndex:(e.vars||e).zIndex.drawer,WebkitOverflowScrolling:"touch",position:"fixed",top:0,outline:0},t.anchor==="left"&&{left:0},t.anchor==="top"&&{top:0,left:0,right:0,height:"auto",maxHeight:"100%"},t.anchor==="right"&&{right:0},t.anchor==="bottom"&&{top:"auto",left:0,bottom:0,right:0,height:"auto",maxHeight:"100%"},t.anchor==="left"&&t.variant!=="temporary"&&{borderRight:`1px solid ${(e.vars||e).palette.divider}`},t.anchor==="top"&&t.variant!=="temporary"&&{borderBottom:`1px solid ${(e.vars||e).palette.divider}`},t.anchor==="right"&&t.variant!=="temporary"&&{borderLeft:`1px solid ${(e.vars||e).palette.divider}`},t.anchor==="bottom"&&t.variant!=="temporary"&&{borderTop:`1px solid ${(e.vars||e).palette.divider}`})),m_={left:"right",right:"left",top:"down",bottom:"up"};function NP(e){return["left","right"].indexOf(e)!==-1}function LP(e,t){return e.direction==="rtl"&&NP(t)?m_[t]:t}const MP=b.forwardRef(function(t,a){const 
i=Ne({props:t,name:"MuiDrawer"}),s=Rr(),r={enter:s.transitions.duration.enteringScreen,exit:s.transitions.duration.leavingScreen},{anchor:n="left",BackdropProps:o,children:l,className:u,elevation:d=16,hideBackdrop:f=!1,ModalProps:{BackdropProps:g}={},onClose:c,open:m=!1,PaperProps:p={},SlideProps:q,TransitionComponent:y=PP,transitionDuration:h=r,variant:_="temporary"}=i,w=U(i.ModalProps,IP),x=U(i,RP),k=b.useRef(!1);b.useEffect(()=>{k.current=!0},[]);const T=LP(s,n),A=D({},i,{anchor:n,elevation:d,open:m,variant:_},x),P=EP(A),j=v.jsx(FP,D({elevation:_==="temporary"?d:0,square:!0},p,{className:Q(P.paper,p.className),ownerState:A,children:l}));if(_==="permanent")return v.jsx(mm,D({className:Q(P.root,P.docked,u),ownerState:A,ref:a},x,{children:j}));const O=v.jsx(y,D({in:m,direction:m_[T],timeout:h,appear:k.current},q,{children:j}));return _==="persistent"?v.jsx(mm,D({className:Q(P.root,P.docked,u),ownerState:A,ref:a},x,{children:O})):v.jsx(jP,D({BackdropProps:D({},o,g,{transitionDuration:h}),className:Q(P.root,P.modal,u),open:m,ownerState:A,onClose:c,hideBackdrop:f,ref:a},x,w,{children:O}))}),OP=MP,BP=Di.plugins;function zP({pluginKey:e}){const t=BP[e];function a(){function s(){document.querySelector("header").style.top="-155px",document.querySelector("#sidebar .MuiDrawer-paper").style.marginTop="0"}setTimeout(s,800)}const i=v.jsxs("div",{style:{paddingLeft:"10px"},children:[v.jsx("h1",{children:"Plugin content"}),v.jsx(jf,{}),v.jsx("p",{children:v.jsx("a",{style:{color:"black"},href:"#general.information",onClick:a,children:"General Information"})}),v.jsx("p",{children:v.jsx("a",{style:{color:"black"},href:"#detailed.information",onClick:a,children:"Detailed Information"})}),v.jsx("p",{children:v.jsx("a",{style:{color:"black"},href:"#plugins",onClick:a,children:"Plugins provided by the package"})}),t.entry_points&&Object.entries(t.entry_points).map(([s,r])=>v.jsx(v.Fragment,{children:v.jsx("ul",{children:v.jsxs("li",{children:[v.jsx("a",{style:{color:"black"},href:`#${s}`,onClick:a,children:s}),Object.entries(r).map(([n,o])=>v.jsx("ul",{children:v.jsx("li",{children:v.jsx("a",{style:{color:"black"},href:`#${s}.${n}`,onClick:a,children:n})})},n))]})})})),v.jsx(jf,{})]});return v.jsx(OP,{variant:"permanent",id:"sidebar",anchor:"right",sx:{display:{xs:"none",sm:"block"}},open:!0,children:i})}function $P(){return v.jsxs(v.Fragment,{children:[v.jsx(WP,{}),v.jsx("div",{style:{marginTop:"155px"},children:v.jsx(fC,{children:v.jsxs(Aw,{children:[v.jsx(Wu,{path:"/",element:v.jsx(yC,{})}),v.jsx(Wu,{path:"/:key",element:v.jsx(HP,{})})]})})}),v.jsx(UP,{})]})}function WP(){return v.jsx("header",{children:v.jsxs("div",{style:{paddingLeft:"20px"},children:[v.jsx("h1",{children:v.jsx("a",{href:"http://aiidateam.github.io/aiida-registry",children:v.jsx("img",{src:Bw,height:"70px"})})}),v.jsx("p",{style:{fontSize:"90%"},children:v.jsx("a",{href:"http://github.com/aiidateam/aiida-registry",style:{color:"#999"},children:"[View on GitHub/register your package]"})})]})})}function UP(){return v.jsxs("footer",{className:"footer",children:[v.jsx("hr",{}),"The official ",v.jsx("a",{href:"http://aiidateam.github.io/aiida-registry",children:"registry"})," of ",v.jsx("a",{href:"http://www.aiida.net",children:"AiiDA"})," plugins.",v.jsx("br",{}),"This work is supported by the ",v.jsx("a",{href:"http://nccr-marvel.ch",target:"_blank",children:"MARVEL National Centre for Competence in Research"})," funded by the ",v.jsx("a",{href:"http://www.snf.ch/en",target:"_blank",children:"Swiss National Science Foundation"}),", as well 
as by the ",v.jsx("a",{href:"http://www.max-centre.eu",target:"_blank",children:"MaX European Centre of Excellence"})," funded by the Horizon 2020 EINFRA-5 program, Grant No. 676598.",v.jsx("br",{}),v.jsx("br",{}),v.jsxs("div",{style:{textAlign:"center"},children:[v.jsx("img",{src:zw,height:"70px"}),"    ",v.jsx("img",{src:$w,height:"70px"})]})]})}function HP(){const{key:e}=gw();b.useEffect(()=>(document.querySelector("footer").style.width="calc(100% - 380px)",()=>{document.querySelector("footer").style.width="calc(100% - 64px)"}),[]);function t(){var a=window.scrollY;window.onscroll=function(){var i=window.scrollY;a>i?(document.querySelector("header").style.top="0",document.querySelector("#sidebar .MuiDrawer-paper").style.marginTop="155px"):a>150&&(document.querySelector("header").style.top="-155px",document.querySelector("#sidebar .MuiDrawer-paper").style.marginTop="0"),a=i}}return t(),v.jsx(v.Fragment,{children:v.jsxs("div",{id:"detailsContainer",children:[v.jsx(xP,{pluginKey:e}),v.jsx(zP,{pluginKey:e})]})})}const VP="/aiida-registry/pr-preview/pr-254/";Xl.createRoot(document.getElementById("root")).render(v.jsx(Wt.StrictMode,{children:v.jsx(Nw,{basename:VP,children:v.jsx($P,{})})})); diff --git a/pr-preview/pr-254/assets/index-ae9b867b.css b/pr-preview/pr-254/assets/index-ae9b867b.css new file mode 100644 index 00000000..87553651 --- /dev/null +++ b/pr-preview/pr-254/assets/index-ae9b867b.css @@ -0,0 +1 @@ +#root{text-align:left}html{height:100%}header{background-color:#000;transition:top .5s;display:block;position:fixed;top:0;z-index:1;margin:0;width:100%;padding:5px 0 14px}body{min-height:100%;font-family:Noto Sans,sans-serif;padding:0;margin:0}main{padding:0 0 0 16px;max-width:1280px;margin:0 auto}p{margin:0;padding:0}a,a:visited{color:#00a;text-decoration:none}a:hover{text-decoration:underline}h1{font-weight:700}.ep_description code{background-color:#d5cccc}#detailsContainer{display:flex;margin-left:50px}#sidebar .MuiDrawer-paper{width:340px;max-height:100vh;background-color:#d3d3d3;border:5px solid gray;transition:.5s;margin-top:155px}.keyword{display:inline;padding:.2em .6em .3em;font-size:75%;color:#000;border-radius:.25em;background-color:#a2cbff}#entrytitle,#entrytitle a{color:#fff}#app-body{max-width:1280px;margin:0 auto}.plugin-header,.plugin-header a{color:#007}h2{padding-top:16px;margin:0;font-size:140%;color:#005}.description{color:#333}#details{margin-top:12px;width:calc(100% - 370px)}.currentstate{color:#666;font-size:90%;margin-bottom:12px}.fade-enter{animation:fadeInAnimation ease .5s;animation-iteration-count:1;animation-fill-mode:forwards}@keyframes fadeInAnimation{0%{opacity:0}to{opacity:1}}.summaryinfo{color:#000;font-size:80%;margin-bottom:12px;margin-top:12px}#entrylist .authors{color:#666;font-size:90%;margin-bottom:9px}#description p{margin-left:0}h3{padding-top:20px;padding-bottom:5px;margin:0;font-size:120%;color:#005}h4{padding-top:20px;padding-bottom:5px;margin:0}ul{margin-block-start:.1em;margin-block-end:.1em}table{background-color:transparent;display:table;border-color:gray;line-height:1.5;border-spacing:0;border-collapse:collapse}th{vertical-align:bottom;border-bottom:2px solid #ddd;padding:8px 16px 8px 0;text-align:left}td{vertical-align:top;padding:8px 16px 8px 0}.footer{display:table;width:calc(100% - 64px);margin-top:16px;padding-top:.5rem;padding-bottom:.5rem;padding-right:32px;padding-left:32px;color:#555;font-size:90%}.globalsummary-box{width:90%;margin:20px;padding:10px 40px;background-color:#0573ff30;border:1px solid 
rgba(0,65,127,.4);border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.05);box-shadow:inset 0 1px 1px #0000000d;line-height:1.5}.submenu-entry{width:90%;min-height:140px;margin:20px;padding:10px 40px;background-color:#c0bebe30;border:1px solid rgba(152,152,152,.31);border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.05);box-shadow:inset 0 1px 1px #0000000d}.classbox{display:inline-block;background-color:#777;padding:0 .2em;font-size:75%;color:#fff;border-radius:.25em}.classbox .tooltiptext{visibility:hidden;background-color:#000;color:#fff;text-align:center;padding:3px;border-radius:6px;position:absolute;z-index:1}.classbox:hover .tooltiptext{visibility:visible}.entrypointraw{color:#777}.badge{white-space:nowrap;margin-right:5px;display:inline-block;vertical-align:middle;font-family:DejaVu Sans,Verdana,Geneva,sans-serif}span.badge-left{border-radius:.25rem;border-top-right-radius:0;border-bottom-right-radius:0;color:#212529;background-color:#a2cbff;text-shadow:1px 1px 1px rgba(0,0,0,.3);padding:.25em .4em;line-height:1;text-align:center;white-space:nowrap;float:left;display:block}span.badge-right{border-radius:.25rem;border-top-left-radius:0;border-bottom-left-radius:0;color:#fff;background-color:#343a40;padding:.25em .4em;line-height:1;text-align:center;white-space:nowrap;float:left;display:block}.badge-right.light-blue,.badge-left.light-blue{background-color:#a2cbff;color:#212529}.badge-right.light-red,.badge-left.light-red{background-color:#ffa2a2;color:#2b0e0e}.badge-right.red,.badge-left.red{background-color:#e41a1c;color:#fff}.badge-right.blue,.badge-left.blue{background-color:#377eb8;color:#fff}.badge-right.green,.badge-left.green{background-color:#4daf4a;color:#fff}.badge-right.purple,.badge-left.purple{background-color:#984ea3;color:#fff}.badge-right.orange,.badge-left.orange{background-color:#ff7f00;color:#fff}.badge-right.brown,.badge-left.brown{background-color:#a65628;color:#fff}.badge-right.dark-gray,.badge-left.dark-gray{color:#fff;background-color:#343a40}.badge a{text-decoration:none;padding:0;border:0;color:inherit}.badge a:visited,.badge a:active{color:inherit}.badge a:focus,.badge a:hover{color:#ffffff80;mix-blend-mode:difference;text-decoration:none}.svg-badge{vertical-align:middle}.tooltip{position:relative;display:inline-block;border-bottom:1px dotted black}.tooltip .tooltiptext{visibility:hidden;background-color:#fff7af;color:#000;text-align:center;border-radius:6px;padding:5px;position:absolute;z-index:1}.tooltip:hover .tooltiptext{visibility:visible}ul.plugin-info{list-style:none;margin-left:0;padding-left:0;padding-top:5px}ul.plugin-info li{padding-left:1em;text-indent:-1em}ul.plugin-info li:before{content:"→";padding-right:5px}.search{position:relative;margin-bottom:10px}.search-form{display:flex;min-height:50px}input[type=text]{padding:8px;border:1px solid #ccc;border-radius:4px;flex:1}.suggestions-list{position:absolute;list-style:none;padding:0;margin:0;max-height:200px;overflow-y:auto;background-color:#fff;border:1px solid #ccc;border-radius:4px;box-shadow:0 2px 4px #0000001a}.suggestion-item{padding:8px;cursor:pointer;border-bottom:1px solid #ccc}.suggestion-item:last-child{border-bottom:none}.suggestion-item:hover{background-color:#f2f2f2}@media only screen and (min-width : 150px) and (max-width : 780px){.search{width:45%;margin:0 auto}.globalsummary-box{width:90%;margin:20px 0;padding:1px 4px}.globalsummary-box .badge{float:left}#entrylist h1{font-size:25px}.submenu-entry{width:70%;min-height:140px;margin:10px;padding:10px 
40px;overflow:auto}header{max-width:100vw}#details{width:100%}#detailsContainer{margin-left:5px}.footer{width:calc(100% - 70px)!important}} diff --git a/pr-preview/pr-254/assets/logo-white-text-16948862.svg b/pr-preview/pr-254/assets/logo-white-text-16948862.svg new file mode 100644 index 00000000..79be8610 --- /dev/null +++ b/pr-preview/pr-254/assets/logo-white-text-16948862.svg @@ -0,0 +1,157 @@ + +image/svg+xml + + + + + + + + + + + + diff --git a/pr-preview/pr-254/favicon.png b/pr-preview/pr-254/favicon.png new file mode 100644 index 0000000000000000000000000000000000000000..c607121b667d326ca6c6ee861812d174683921f2 GIT binary patch literal 5286 zcmV;X6j|$uP)f9uR7GYK!NmbaRq_^4?UB3ebOP)h=~ zO10v()~c;75`5Akw@P715EK=aRspG{x87=7Qd_Yo5Zj9OqF9B57P&>sOCiYwwY+C0 z$(+?c5->;}bLN~ga{~E&A0LyMv)5Xay>cFVuZO5AT^zd@={BH?^wQ-3$2eUMaE#+m zw*hIP^n{)e>`!C7KL&znRMQZx=7EoQr`xntBS-a3polQF?@&a?pj-s>Z~oVd&g{}58sB^@c{=x~5gzptV?8#p^f8s9P`~M@l#~9+2k3@MIiY^j5s`EoW0lTz2DrO#H?rhu(5upIO6$n)iRcYHRJ%Xj zmLy8&ae&g?AZFwhl=IVVO1lWFF|tMw+7eB-DT&Z|E&+|vg`S^$0Ofq*k)NmAj6~@i z4lu3HBt*ZRZZj-!9aH<>lWrpt!TXnhP`}ZrE~T?L>nE5fS2Cmag>)Oz9^Q6<(g7zE zkN+L$m2Oj#WFM*p%m{Bvw;^re?Z2vcd?uZD@&URdGArFiw1;=k0HwVPh#PAGJI~e% z#7XbAw_+OSabNY@X|$nDyn6TefsIzXt;NMNKh zvT8O^R$Gj6Iq;q%GOhDvruHdHlkRr$rUM{lj*Fa8yOePFY*Z%!sgL|Iq%`F^6To{8 zP&(jb*A{%9)COjR=g=sFfrUWo_VktWDIGX4W%?4tdk)YTALF`?Sv5N;tGxxZ5LoHD zzJtW$SENi=f_TpXgey~~t1P^Zvf2@dUJYzkU80t{!Kh z+zae;QPYT~h-B~JEe9|gFUQ$u_QV%Ce)3+78MXHlkDm_mw573$lYyBr+6B2u<1Gh3 z^BopweH~%u&Kk;UZp4)H5mq~F4=4qW%d~;F96;%pZW9i1U(Mer3l9VRHc;)HVCnCi zEcoN(``n=oYnU#;4Z8`Y1B7=S5M#_2WSdRs7tpQ9a1`=kZFBhLv=B5N)i&Py_9Y2 zwfX=xcco}$78xUin1CnYnYF~9gj$?Ss0aO)aNxIEw zmC~AJz)QAd2<2WYE#oZ*08sM}DbZ8fcTjWmapI`&2S7v8I#qw0^jMeho&(g?y#nlV zP5#||yD_crOyX=nq(Av4evR?liC3Cl;d(mglT=Q zCyRH1dw~G(Suz9DEXk>ll>s}G(7)JAD}V>_o&x~*{P*MJSzubfbD7rXb&&t%h?PdY zov8!PNLbbbAMZw(V$p&qraobDOiJ*k1B5>K7;qw;%3x~WMWAoClWUNH#2>LHHT-jw zs)UbMoyCmW6hs|c$D0lSurFgS_Selb5WYjl>j9S$>X&%1W=KucxHG|{-(z>r2NRZ^ zV!Z1Bk9-gTrl(AwnvZgeWF&n2ebui4i#wL9xABwrJhT1#X}s$I0KV`dU}S0KHuFyr0oOrcgzT;fPJFI@C9k&`t-#$=$*G4ge6U-HgiB zq#|TYxS7)4i6?J_b{s%ta*KC%ffh9_8hM}2;Q-JShzZ{Y+`geQFlDZ#5v8@i237(4 zK`)~$yv`NNT%hwf06A-iZSH@ z#Febu1Ni)B5ZdmkKu#y=oM(WRl!jMuU$_wX7S31s$fmK;v&*Bgb=*10MTQ{mnJAfdPzand_ptFEe?HF;SQS!&NqYikku5Kd8qK!R>1#Skt6UcE{ z-NztPsLNTXq2vl*C*=Tz%d##}gNJ}~lTq3M^%$C0IWn5A+Wtu+3pRfZm0tiSrdaix zh*6^BPqCbMYRLie$|cj^D>zS-o2@EJ7}bX>M@2i;CXAGC$mGM|Y>>OsXhs~-$H{D% zs^2|hZ=!<|c7Xhf9=#21d=9uMSql@OE?`CnuO6*?T-4zxGH=sqi0?O`=cL<~X5Pi8 zBX!)M_Y*ldp>=@56`A>lHvSEFCm&#nmB}w$7Wmwd<=JE1G^jmf-sW#2###?eKEUai zvPu?iJu{JmJ9L18is082-zt);h-Cq-tXdsiWNCyLk0ScaIV*yr+_I`IWd4@x5qTD; zO2;^fGLX2wCQoeZFkq(LOF-ev-G-`~uc2f)YG5mvaPhk0$X^pZKEEP(xuMe3l~ahQ zDMeMI4*Z{M2DU)vZ5x4@%dx)Bk!to}a=G4``&wHEwCMmtS9I$a7xOKY0j`<%35zR> zBmb4?!FlD9nb9-29vJ+|yRutN8BsNQzpEu!0wX@?;T*JKbk zUj$BeL%zG{Fm`SUwf4yo4p6W>`znNQxo;Ez+x-pE2doS*v~ecw7nL4aup--HNOuC5 zCzJp~9G9CagemgGmWz*gyX8H=h?U~s6$`Eh5-*rwDwiz3e?dU(OIZbQT{-=?Fd z7hy*v8a?ZuS7^cuBJGO%>A!m7_iZBeD}2r(Y^Nc9w%&f)p-1h*ev!Llf$!nW_A|;71`~Q;hT*8$^Q_r+ib!f%0PVn@h#sv^vlez zkZy`hcS6r`f#@BInGdwJo2RI8r;_+Obis=3Yt%wZIYsWsuL$HNdVE{R!fpLfoTw4F zUrTbYMq~se-E6`>V%{MO*Bq*YV0`i-I0@+El%A^M*gKWQks?#{3O?PmS$Vp0)c&^; zl{KtfvIMmnp6wS5`)l*2Sqfy^meY)m&soWdxrHDlG@TVvj0~Xh_-`Kk$$+M)LPZs}=-FUY(tMR4#EAZAK>+O8eF zcjubosHHC|ew~etQ?QtFZL(v5lmw;dxSW4#N$zOSuL19-LMOs?@X0-Z=b9P=6fV!c z;Gm&NBa9hVE|&K)npe&)v!&Z2S~BQGd>`i_OagYhtZo>9F?3n*RKWJ#veO76DmoYY zF^1ivJ+rJ&kzC4*0$-*ftL}=i)|Jb<=;oUqn2&ww*(}nfR^a(r)}5qIoBcPMIdWz zND3IL-fCyw8daA?->oc)d{sm*1CqHh%g$TTE9H0eZlmK*T}4UGxrpA3CUdzo0)|@8 
zQ%f6_MX}|;!*smUcm$d8dhZ2qI)H+5LiAS%KeIeoG~kz}ntM5pTjnr@%d;;ikl48%);62xYG2W5j_ z4Wn8-Rwkx!dG@vR3_gfD^!TQo-vOi?A3HbSf=C%~fq|9AzbLF zV6pxkW!|P?;5*$pBXplm$o*Y1Bjt(Bmtx3ch&a1vBl?aXHLKEn2{`Yi?tL44@%w=r z5vOYVwL}wd@N@_%KpgG#wGFW305bpmLC`n{L%!~y9W6Di_c!j1y^_&0xR*5KxD8$= zfgZlBiR#lSKzFzFn9`g=W(B{wf_SUB6s{wSHufMIxEIlhAOV-un3$0b#`>{Z4=i&@ zNt+C*==SyQ65ODw_v7N^gZzD6QN(fySsi!UCSbD*2DOQR$o$RUA(r_rqCcXkxuskD zNhf@6U-Mn0ECF>snW7c)D*}0<#$y7Px^c4qG>Y12My(Yf<2ydiKC)obg{W^XNQzR; z^_Ot4Ok5!F60v9_E|!hUThZ(I3>wM|jPcNVo|+r@#7N!&On!%fs}3OZHl2jXgJ4_> zDSH~*OEmKj&>Rw0IsD(d6#nFfrV%T}U$7#0TL$&-f!<0ACm-0drf7fCnaAr)DRQEZo7F~$76p}v`*X)N_7>#9~q z3N@s*V(O$6gWg*(YL<|15RQ&I2>79H~ z!<=k)@&Po2iHfBR<~A6J$eU@N{07u0Y5B%s4zQ*;`Y%DAbU;zk**FgIr>c=r=UTQ- z$oVDOW}aiB_;9R&MY71lI^mT6Z~4xVnYN~{45&+$NiGmG)cj$`hb4}Jg4_V?bW(1L z*@`c4->2U^!U5Kf-n$b+t^(>)VwS_G^1U^qVvc9u>V%wHM8E5_+|;4B>9~GJ1e3O! zYhO9?z$(Sv=U;cw2dhR!e&w3<@q?D4Jnoi!7n!UjgI;XygEosTR24^lA?PFS+2k~` zv}$$CQr2-&WXs(&m9+09=xi;?ZL7n1`_(p1+Y_5?Ga5G~h{!)8vB-5!-1T>a-jow1 zP=6`tD{kv@fTy%%P`jDc?K;5Xu^MmK9lcV8pF3-pgD4f68MxS~dO!)O{O%chX*5S8 z*w|-qo#&{_y``gr5-t`!WJT~!kU6AwDYpY;?%+L)4K?r!AF>FL*gvBqqNFInIuS4p1-3gten0X+JXw2=lj21)V~=OUDY^ zQMq1Aa$Zezh^4M!RU@Mn7@Vi6t!D9Q))=N-;)ThFrZoaB$teTA0P>eKTJk&@8TnQw zA8fRYDO{d)DWVT5uC{Rh1ZA@zQz}PAeru(=SgMf)o46JpNN@q;W? zT0b<}PSYqsWXSTYYgKe6kejri_W2O>zJ@)q`OVQwo<||E&^@< zuA%)Bw~257Whn-8bbRjL>|5wk>)0VLcP~KWV^A&u2GEp(-g?}h5W%Ddp&HfIim_zf zn~~R3@l2memqyorZ=xT2d}$y@Stu zL3#u-1N}v$zX3i>vs$yVt1E~9dsj*f@E}dirn1gN%pQouXds3V1Bqey_Grn;|90Jm suIAClHeJo5k8!#j;25XN0giG01K-WjuYjG=y#N3J07*qoM6N<$f)pPXI{*Lx literal 0 HcmV?d00001 diff --git a/pr-preview/pr-254/index.html b/pr-preview/pr-254/index.html new file mode 100644 index 00000000..c6649e4c --- /dev/null +++ b/pr-preview/pr-254/index.html @@ -0,0 +1,24 @@ + + + + + + + + AiiDA Plugin Registry + + + + +
+ + + diff --git a/pr-preview/pr-254/plugins_metadata.json b/pr-preview/pr-254/plugins_metadata.json new file mode 100644 index 00000000..b9c70f15 --- /dev/null +++ b/pr-preview/pr-254/plugins_metadata.json @@ -0,0 +1,30114 @@ +{ + "plugins": { + "aiida-QECpWorkChain": { + "code_home": "https://github.com/rikigigi/aiida-QECpWorkChain", + "development_status": "beta", + "entry_point_prefix": "qecpworkchain", + "pip_url": "git+https://github.com/rikigigi/aiida-QECpWorkChain", + "name": "aiida-QECpWorkChain", + "package_name": "aiida_QECpWorkChain", + "hosted_on": "github.com", + "metadata": { + "author": "Riccardo Bertossa", + "author_email": "rbertoss@sissa.it", + "version": "0.2.0a0", + "description": "Car-Parrinello Work Chain with Quantum Espresso. This workchain does a full CP simulation, from the choice of the electronic mass and the timestep, to the choice of the best parallelization options, and then it does the NPT equilibration and a final NVE simulation at the prescribed P and T. Automates as much as possible.", + "classifiers": [ + "Programming Language :: Python", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", + "Natural Language :: English", + "Framework :: AiiDA" + ] + }, + "aiida_version": ">=2.0.0,<3.0.0", + "entry_points": { + "aiida.workflows": { + "qecpworkchain.cp": { + "description": [ + "No description available" + ], + "spec": { + "inputs": [ + { + "name": "cp_code", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "cp_resources_cg_list", + "required": true, + "valid_types": "List", + "info": "Same as cp_resources_cp_list but when doing a CG. The CG uses a different amount of resource and can use no band or task group parallelization." + }, + { + "name": "cp_resources_cp_list", + "required": true, + "valid_types": "List", + "info": "List of dictionary like the following:\n{\n 'resources' : {\n 'num_machines' : 2,\n 'num_mpiprocs_per_machine' : 48,\n },\n 'wallclock' : 3600,\n 'queue' : 'queue_name',\n 'account': 'account_name',\n}\nc,porturrently only the first element of the list is used.\n'wallclock' is the maximum time that can be requested to the scheduler. This code can decide to ask for less.\n" + }, + { + "name": "ecutwfc", + "required": true, + "valid_types": "Float", + "info": "wavefunction cutoff (Ry), like in the QE input" + }, + { + "name": "pseudo_family", + "required": true, + "valid_types": "Str", + "info": "pseudopotential family to use, as in usual aiida operations" + }, + { + "name": "pw_code", + "required": true, + "valid_types": "Code", + "info": "input pw code (used to calculate force ratio)" + }, + { + "name": "pw_resources_list", + "required": true, + "valid_types": "List", + "info": "Same as cp_resources_cp_list but for pw.x code." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData, TrajectoryData", + "info": "Input structure. If a trajectory is given, the workchain will use its last step to start the CG. If velocities are present, they will be used to initialize the simulation. Note that if you use a trajectory, usually kind information (like mass) are not included, so default values will be used. 
If you want to include kind information or override those provided with the input structure, use the input structure_kinds" + }, + { + "name": "thermobarostat_points", + "required": true, + "valid_types": "List", + "info": "List of dicts, each with the format [ { \"temperature_K\": 1000, \"pressure_KBar\": 10 , \"equilibration_time_ps\": 5.0, \"thermostat_time_ps\": 5.0} ]. The simulation will loop over this list of dictionaries, in the same order, equilibrating for the specified time at the given P,T point. Every point is repeated if the average T and P are not within the specified ranges" + }, + { + "name": "additional_parameters_cp", + "required": false, + "valid_types": "Dict", + "info": "parameters that will be included in the settings input of the QE CP plugin. These settings will be added on top of the default one. Same format as plugin input" + }, + { + "name": "adjust_ionic_mass", + "required": false, + "valid_types": "Bool", + "info": "Multiply the mass of the ions by the corresponding force ration between the cp forces and pw forces -- that is less than 1. Note that averages of static properties do not depend on the ionic masses." + }, + { + "name": "benchmark_emass_dt_walltime_s", + "required": false, + "valid_types": "Float", + "info": "same as benchmark_parallel_walltime_s but for dermining the best electronic mass and timestep." + }, + { + "name": "benchmark_parallel_walltime_s", + "required": false, + "valid_types": "Float", + "info": "time requested to the scheduler during the test for finding the best parallelization parameters." + }, + { + "name": "cmdline_cp", + "required": false, + "valid_types": "List, NoneType", + "info": "additional command line parameters of the cp verlet caclulations only (for example parallelization options)" + }, + { + "name": "default_nose_frequency", + "required": false, + "valid_types": "Float", + "info": "default nose frequency when a frequency cannot be estimated from the vibrational spectrum" + }, + { + "name": "dt", + "required": false, + "valid_types": "Float, NoneType", + "info": "timestep in atomic units, if not automatically chosen." + }, + { + "name": "dt_start_stop_step", + "required": false, + "valid_types": "List", + "info": "list of timesteps to try. Timesteps are changed to better integrate the equation of motion. When a new electronic mass is selected by this workchain timesteps are automatically adjusted." + }, + { + "name": "emass", + "required": false, + "valid_types": "Float, NoneType", + "info": "electronic mass, atomic mass units, if not automatically chosen" + }, + { + "name": "emass_list", + "required": false, + "valid_types": "List", + "info": "list of electronic masses to try. The emass is selected in order to satisfy the requested CP/DFT force ratio." + }, + { + "name": "initial_atomic_velocities_A_ps", + "required": false, + "valid_types": "ArrayData, NoneType", + "info": "optional input initial velocities in angstrom over picoseconds" + }, + { + "name": "max_slope_const", + "required": false, + "valid_types": "Float", + "info": "max slope in K/ps of the constant of motion linear fit." + }, + { + "name": "max_slope_ekinc", + "required": false, + "valid_types": "Float", + "info": "max slope in K/ps of the ekinc linear fit. If not satisfied try to change emass" + }, + { + "name": "max_slope_min_emass", + "required": false, + "valid_types": "Float", + "info": "minimum possible value of electronic mass that can be set by the max_slope correction routine. Will not go lower than that." 
+ }, + { + "name": "max_slope_min_ps", + "required": false, + "valid_types": "Float", + "info": "minimum required length in ps of the last trajectory to do the linear fit on ekinc and const of motion" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "min_traj_steps_vdos", + "required": false, + "valid_types": "Int", + "info": "minimum number of steps to consider the calculated vibrational spectrum maximum valid, to set the thermostat frequency" + }, + { + "name": "minimum_nose_frequency", + "required": false, + "valid_types": "Float", + "info": "minimum nose frequency: if the frequency estimated from the vibrational spectrum is lower than this value, this value is used" + }, + { + "name": "nstep_initial_cg", + "required": false, + "valid_types": "Int", + "info": "At the beginning of the simulation the CP algorithm is not used. This is the number of steps to do using the Born-Oppenheimer molecular dynamics algorithm with a conjugate gradient minimization of the electronic ground state." + }, + { + "name": "nstep_parallel_test", + "required": false, + "valid_types": "Int", + "info": "the benchmark simulations will be that long, if performed" + }, + { + "name": "number_of_pw_per_trajectory", + "required": false, + "valid_types": "Int", + "info": "Number of pw submitted for every trajectory during calculation of force ratio." + }, + { + "name": "nve_required_picoseconds", + "required": false, + "valid_types": "Float", + "info": "The equilibrated NVE simulation will last at least this number of picoseconds." + }, + { + "name": "pressure_tolerance", + "required": false, + "valid_types": "Float", + "info": "Pressure tolerance in kBar used to say if the npt is equilibrated. If not set, use the standard deviation of the P time series" + }, + { + "name": "skip_emass_dt_test", + "required": false, + "valid_types": "Bool", + "info": "" + }, + { + "name": "skip_parallel_test", + "required": false, + "valid_types": "Bool", + "info": "do not run benchmarks to discover a good internal Quantum Espresso parallelization scheme for the current system" + }, + { + "name": "skip_thermobarostat", + "required": false, + "valid_types": "Bool", + "info": "" + }, + { + "name": "structure_kinds", + "required": false, + "valid_types": "List, NoneType", + "info": "These kinds will be used to override or set the masses of the various atomic types. Note that the workflow, if skip_emass_dt_test is True, will calculate the ratio between cp forces and pw forces and adjust the provided masses automatically according to this ratio. So if you provide this input, make sure to set skip_emass_dt_test to True and set also the inputs emass and dt, or \"bad things can happen\"" + }, + { + "name": "target_force_ratio", + "required": false, + "valid_types": "Float", + "info": "The forces calculated by the Car-Parrinello method are affected by two types of error: one is due to the oscillations of the electrons around the DFT energy minimum, and the second is due to the finite mass of the electronic fluid that produces a _systematic_ error in the forces, as if the electrons add mass to the ionic core. This second kind of error can be controlled by this parameter, which tries to adjust the electronic mass to obtain the desired ratio between CP forces and true DFT forces. Then you may want to modify the ionic mass to correct the leading factor of this error."
+ }, + { + "name": "temperature_tolerance", + "required": false, + "valid_types": "Float", + "info": "Temperature tolerance in K used to say if the npt is equilibrated. If not setted, use the standard deviation of the T time series" + }, + { + "name": "tempw_initial_random", + "required": false, + "valid_types": "Float, NoneType", + "info": "If provided, sets the initial temperature when randomly initializing the starting velocities." + } + ], + "outputs": [ + { + "name": "cmdline_cp", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "dt", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "emass", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "full_traj", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "kinds", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "nve_prod_traj", + "required": true, + "valid_types": "", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 401, + "message": "The initial cg steps failed. I cannot start to work." + }, + { + "status": 402, + "message": "Nose-Hoover thermostat failed." + }, + { + "status": 403, + "message": "Final cg after Nose-Hoover failed." + }, + { + "status": 404, + "message": "Error in the NVE simulation" + }, + { + "status": 405, + "message": "The simulations are calculating very expensive random numbers. There is something wrong (cutoff? metal? boo?)" + }, + { + "status": 406, + "message": "Wrong input parameters" + }, + { + "status": 407, + "message": "Parallel test was not succesful, maybe there is something more wrong." + }, + { + "status": 408, + "message": "Multiple errors in the simulation that cannot fix." + }, + { + "status": 409, + "message": "This is a bug in the workchain." + } + ] + }, + "class": "aiida_QECpWorkChain.workflow:CpWorkChain" + } + } + }, + "commits_count": 5, + "summaryinfo": [ + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/rikigigi/aiida-QECpWorkChain", + "is_installable": "True" + }, + "aiida-abinit": { + "code_home": "https://github.com/sponce24/aiida-abinit", + "entry_point_prefix": "abinit", + "pip_url": "aiida-abinit", + "plugin_info": "https://raw.github.com/sponce24/aiida-abinit/master/setup.json", + "name": "aiida-abinit", + "package_name": "aiida_abinit", + "hosted_on": "github.com", + "metadata": { + "description": "The AiiDA plugin for ABINIT.", + "author_email": "Samuel Ponce ", + "classifiers": [ + "Development Status :: 4 - Beta", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9" + ], + "version": "0.4.0" + }, + "aiida_version": ">=1.6.3,<1.7.0", + "entry_points": { + "aiida.calculations": { + "abinit": { + "description": [ + "AiiDA calculation plugin wrapping the abinit executable." 
+ ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "kpoints", + "required": true, + "valid_types": "KpointsData", + "info": "The k-point mesh or path" + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "The ABINIT input parameters." + }, + { + "name": "pseudos", + "required": true, + "valid_types": "Psp8Data, JthXmlData", + "info": "The pseudopotentials." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "The input structure." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "parent_calc_folder", + "required": false, + "valid_types": "RemoteData", + "info": "A remote folder used for restarts." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict", + "info": "Various special settings." + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "Various output quantities." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "output_bands", + "required": false, + "valid_types": "BandsData", + "info": "Final electronic bands if present." + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "Final structure of the calculation if present." + }, + { + "name": "output_trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "Trajectory of various output quantities over the calculation if present." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "Calculation did not produce all expected output files." + }, + { + "status": 101, + "message": "Calculation did not produce the expected `[prefix]o_GSR.nc` output file." + }, + { + "status": 102, + "message": "Calculation did not produce the expected `[prefix]o_HIST.nc` output file." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 200, + "message": "The retrieved folder data node could not be accessed." + }, + { + "status": 210, + "message": "The retrieved folder did not contain the `stdout` output file." + }, + { + "status": 301, + "message": "The `stdout` output file could not be read." + }, + { + "status": 302, + "message": "The `stdout` output file could not be parsed." + }, + { + "status": 303, + "message": "The `abipy` `EventsParser` reports that the runw as not completed." 
+ }, + { + "status": 304, + "message": "The output file contains one or more error messages." + }, + { + "status": 305, + "message": "The output file contains one or more warning messages." + }, + { + "status": 312, + "message": "The output structure could not be parsed." + }, + { + "status": 400, + "message": "The calculation stopped prematurely because it ran out of walltime." + }, + { + "status": 500, + "message": "The SCF minimization cycle did not converge." + }, + { + "status": 501, + "message": "The ionic minimization cycle did not converge." + } + ] + }, + "class": "aiida_abinit.calculations:AbinitCalculation" + } + }, + "aiida.parsers": { + "abinit": "aiida_abinit.parsers:AbinitParser" + }, + "aiida.workflows": { + "abinit.base": { + "description": [ + "Base Abinit Workchain to perform a DFT calculation. Validates parameters and restart." + ], + "spec": { + "inputs": [ + { + "name": "abinit", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict", + "info": "Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "An explicit k-points mesh or list. Either this or `kpoints_distance` must be provided." + }, + { + "name": "kpoints_distance", + "required": false, + "valid_types": "Float", + "info": "The minimum desired distance in 1/\u212b between k-points in reciprocal space. The explicit k-point mesh will be generated automatically by a calculation function based on the input structure." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "Various output quantities." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "output_bands", + "required": false, + "valid_types": "BandsData", + "info": "Final electronic bands if present." + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "Final structure of the calculation if present." + }, + { + "name": "output_trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "Trajectory of various output quantities over the calculation if present." 
+ }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 201, + "message": "`pseudos` could not be used to get the necessary pseudos." + }, + { + "status": 202, + "message": "Neither the `kpoints` nor the `kpoints_distance` input was specified." + }, + { + "status": 203, + "message": "Neither the `options` nor `automatic_parallelization` input was specified." + }, + { + "status": 204, + "message": "The `metadata.options` did not specify both `resources.num_machines` and `max_wallclock_seconds`." + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." + } + ] + }, + "class": "aiida_abinit.workflows.base:AbinitBaseWorkChain" + } + } + }, + "commits_count": 12, + "development_status": "beta", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-abinit", + "is_installable": "True" + }, + "aiida-aenet": { + "code_home": "https://gitlab.com/lattice737/aiida-aenet", + "development_status": "planning", + "entry_point_prefix": "aenet", + "pip_url": "https://gitlab.com/lattice737/aiida-aenet", + "name": "aiida-aenet", + "package_name": "aiida_aenet", + "hosted_on": "gitlab.com", + "metadata": { + "author": "Nicholas Martinez", + "author_email": "nicholasmartinez@my.unt.edu", + "version": "0.1.0", + "description": "AiiDA plugin to construct machine-learning potentials using aenet", + "classifiers": [ + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Development Status :: 0 - Alpha" + ] + }, + "aiida_version": "~=1.2", + "entry_points": { + "aiida.data": { + "aenet.algorithm": "aiida_aenet.data.algorithm:AenetAlgorithm", + "aenet.potential": "aiida_aenet.data.potentials:AenetPotential" + }, + "aiida.calculations": { + "aenet.cur": "aiida_aenet.calculations.cur:CurCalculation", + "aenet.generate": "aiida_aenet.calculations.generate:AenetGenerateCalculation", + "aenet.predict": "aiida_aenet.calculations.predict:AenetPredictCalculation", + "aenet.simulate": "aiida_aenet.calculations.simulate:AenetLammpsMdCalculation", + "aenet.train": "aiida_aenet.calculations.train:AenetTrainCalculation", + "aenet.transform": "aiida_aenet.calculations.transform:TransformCalculation" + }, + "aiida.parsers": { + "aenet.generate": "aiida_aenet.parsers.generate:AenetGenerateParser", + "aenet.predict": "aiida_aenet.parsers.predict:AenetPredictParser", + "aenet.simulate": "aiida_aenet.parsers.simulate:AenetLammpsMdParser", + "aenet.train": "aiida_aenet.parsers.train:AenetTrainParser" + }, + "aiida.workflows": { + "aenet.build_reference": 
"aiida_aenet.workflows.build_reference:BuildReferenceWorkChain", + "aenet.compare_simulations": "aiida_aenet.workflows.compare_simulations:CompareSimulationsWorkChain", + "aenet.make_potential": "aiida_aenet.workflows.make_potential:MakePotentialWorkChain", + "aenet.make_structures": "aiida_aenet.workflows.make_structures:MakeStructuresWorkChain" + }, + "aenet.potentials": { + "lammps.ann": "aiida_aenet.data.potentials.lammps:ANN" + } + }, + "commits_count": 1, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 6 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 4 + }, + { + "colorclass": "red", + "text": "Data", + "count": 2 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 4 + }, + { + "colorclass": "orange", + "text": "Other (Aenet potentials)", + "count": 1 + } + ], + "pip_install_cmd": "pip install https://gitlab.com/lattice737/aiida-aenet" + }, + "aiida-alloy": { + "code_home": "https://github.com/DanielMarchand/aiida-alloy", + "development_status": "beta", + "entry_point_prefix": "alloy", + "pip_url": "git+https://github.com/DanielMarchand/aiida-alloy", + "name": "aiida-alloy", + "package_name": "aiida_alloy", + "hosted_on": "github.com", + "metadata": { + "author": "The AiiDA developers group", + "author_email": "", + "version": "0.1.0a0", + "description": "Aiida Workflows for Elastic Constants using Quantum Espresso", + "classifiers": [ + "Programming Language :: Python" + ] + }, + "aiida_version": ">=1.0.0a0", + "entry_points": { + "aiida.workflows": { + "elastic": "aiida_alloy.workflows.ElasticWorkChain:ElasticWorkChain" + } + }, + "commits_count": 1, + "summaryinfo": [ + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/DanielMarchand/aiida-alloy", + "is_installable": "False" + }, + "aiida-ase": { + "code_home": "https://github.com/aiidateam/aiida-ase", + "documentation_url": "https://aiida-ase.readthedocs.io/", + "entry_point_prefix": "ase", + "pip_url": "aiida-ase", + "plugin_info": "https://raw.github.com/aiidateam/aiida-ase/master/setup.json", + "name": "aiida-ase", + "package_name": "aiida_ase", + "hosted_on": "github.com", + "metadata": { + "description": "The official AiiDA plugin for ASE.", + "author_email": "The AiiDA team ", + "classifiers": [ + "Development Status :: 4 - Beta", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering" + ], + "version": "2.0.0" + }, + "aiida_version": ">=1.6,<2.0", + "entry_points": { + "aiida.calculations": { + "ase.ase": { + "description": [ + "`CalcJob` implementation that can be used to wrap around the ASE calculators." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "Input parameters for the namelists." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "The input structure." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "The k-points to use for the calculation." 
+ }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict", + "info": "Optional settings that control the plugin." + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "array", + "required": false, + "valid_types": "ArrayData", + "info": "" + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "" + }, + { + "name": "trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 300, + "message": "One of the expected output files was missing." + }, + { + "status": 301, + "message": "The log file from the DFT code was not written out." + }, + { + "status": 302, + "message": "Relaxation did not complete." + }, + { + "status": 303, + "message": "SCF Failed." + }, + { + "status": 305, + "message": "Cannot identify what went wrong." + }, + { + "status": 306, + "message": "gpaw could not find the PAW potentials." + }, + { + "status": 307, + "message": "Attribute Error found in the stderr file." + }, + { + "status": 308, + "message": "Fermi level is infinite." + }, + { + "status": 400, + "message": "The calculation ran out of walltime." + } + ] + }, + "class": "aiida_ase.calculations.ase:AseCalculation" + } + }, + "aiida.parsers": { + "ase.ase": "aiida_ase.parsers.ase:AseParser", + "ase.gpaw": "aiida_ase.parsers.gpaw:GpawParser" + }, + "aiida.workflows": { + "ase.gpaw.base": { + "description": [ + "Workchain to run a GPAW calculation with automated error handling and restarts." + ], + "spec": { + "inputs": [ + { + "name": "gpaw", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "The input structure." + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." 
+ }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict", + "info": "Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "k-points to use for the calculation." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "array", + "required": false, + "valid_types": "ArrayData", + "info": "" + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "" + }, + { + "name": "trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." 
+ } + ] + }, + "class": "aiida_ase.workflows.base:GpawBaseWorkChain" + } + } + }, + "commits_count": 8, + "development_status": "beta", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 2 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-ase", + "is_installable": "True" + }, + "aiida-autocas": { + "entry_point_prefix": "autocas", + "code_home": "https://github.com/microsoft/aiida-autocas", + "version_file": "https://raw.githubusercontent.com/microsoft/aiida-autocas/main/aiida_autocas/__init__.py", + "pip_url": "git+https://github.com/microsoft/aiida-autocas", + "name": "aiida-autocas", + "package_name": "aiida_autocas", + "hosted_on": "github.com", + "metadata": { + "version": "0.1.0", + "description": "AiiDA AutoCAS Plugin", + "classifiers": [] + }, + "aiida_version": ">=2.0,<3", + "entry_points": { + "aiida.calculations": { + "autocas": "aiida_autocas.calculations:AutoCASCalculation" + }, + "aiida.parsers": { + "autocas": "aiida_autocas.parsers:AutoCASParser" + } + }, + "commits_count": 11, + "development_status": "planning", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/microsoft/aiida-autocas" + }, + "aiida-bands-inspect": { + "code_home": "https://github.com/greschd/aiida-bands-inspect", + "documentation_url": "https://aiida-bands-inspect.readthedocs.io", + "entry_point_prefix": "bands_inspect", + "pip_url": "aiida-bands-inspect", + "name": "aiida-bands-inspect", + "package_name": "aiida_bands_inspect", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA Plugin for running bands_inspect", + "author": "Dominik Gresch", + "author_email": "greschd@gmx.ch", + "license": "Apache 2.0", + "home_page": "https://aiida-bands-inspect.readthedocs.io", + "classifiers": [ + "Development Status :: 4 - Beta", + "Environment :: Plugins", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Topic :: Scientific/Engineering :: Physics" + ], + "version": "0.4.0" + }, + "aiida_version": null, + "entry_points": { + "aiida.calculations": { + "bands_inspect.align": { + "description": [ + "Calculation class for the ``bands-inspect align`` command.", + "", + " Arguments", + " ---------", + " bands1 : aiida.orm.data.array.bands.BandsData", + " First band structure to compare.", + " bands2 : aiida.orm.data.array.bands.BandsData", + " Second band structure to compare." + ], + "spec": { + "inputs": [ + { + "name": "bands1", + "required": true, + "valid_types": "BandsData", + "info": "First bandstructure which is to be aligned" + }, + { + "name": "bands2", + "required": true, + "valid_types": "BandsData", + "info": "Second bandstructure which is to be aligned" + }, + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." 
+ }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "bands1_shifted", + "required": true, + "valid_types": "BandsData", + "info": "" + }, + { + "name": "bands2_shifted", + "required": true, + "valid_types": "BandsData", + "info": "" + }, + { + "name": "difference", + "required": true, + "valid_types": "Float", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "shift", + "required": true, + "valid_types": "Float", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 200, + "message": "The retrieved folder data node could not be accessed." + }, + { + "status": 210, + "message": "At least one of the expected output files is missing from the retrieved folder." + }, + { + "status": 220, + "message": "The text output file content is not in the expected format." + } + ] + }, + "class": "aiida_bands_inspect.calculations.align:AlignCalculation" + }, + "bands_inspect.difference": { + "description": [ + "Calculation class for the ``bands-inspect difference`` command.", + "", + " Arguments", + " ---------", + " bands1 : aiida.orm.nodes.data.array.bands.BandsData", + " First band structure to compare.", + " bands2 : aiida.orm.nodes.data.array.bands.BandsData", + " Second band structure to compare." + ], + "spec": { + "inputs": [ + { + "name": "bands1", + "required": true, + "valid_types": "BandsData", + "info": "First bandstructure which is to be compared" + }, + { + "name": "bands2", + "required": true, + "valid_types": "BandsData", + "info": "Second bandstructure which is to be compared" + }, + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "difference", + "required": true, + "valid_types": "Float", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." 
+ }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 200, + "message": "The retrieved folder data node could not be accessed." + }, + { + "status": 210, + "message": "The retrieved folder does not contain the difference output file." + } + ] + }, + "class": "aiida_bands_inspect.calculations.difference:DifferenceCalculation" + }, + "bands_inspect.plot": { + "description": [ + "Calculation class for the ``bands_inspect plot`` command.", + "", + " Arguments", + " ---------", + " bands1 : aiida.orm.nodes.data.array.bands.BandsData", + " First band structure to plot.", + " bands2 : aiida.orm.nodes.data.array.bands.BandsData", + " Second band structure to plot." + ], + "spec": { + "inputs": [ + { + "name": "bands1", + "required": true, + "valid_types": "BandsData", + "info": "First bandstructure which is to be plotted" + }, + { + "name": "bands2", + "required": true, + "valid_types": "BandsData", + "info": "Second bandstructure which is to be plotted" + }, + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "plot", + "required": true, + "valid_types": "SinglefileData", + "info": "The created band-structure comparison plot." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 200, + "message": "The retrieved folder data node could not be accessed." + }, + { + "status": 210, + "message": "The retrieved folder does not contain the plot output file." 
+ } + ] + }, + "class": "aiida_bands_inspect.calculations.plot:PlotCalculation" + } + }, + "aiida.parsers": { + "bands_inspect.bands": "aiida_bands_inspect.parsers.bands:BandsParser", + "bands_inspect.difference": "aiida_bands_inspect.parsers.difference:DifferenceParser", + "bands_inspect.align": "aiida_bands_inspect.parsers.align:AlignParser", + "bands_inspect.plot": "aiida_bands_inspect.parsers.plot:PlotParser" + } + }, + "commits_count": 0, + "development_status": "beta", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 3 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 4 + } + ], + "pip_install_cmd": "pip install aiida-bands-inspect", + "is_installable": "True" + }, + "aiida-bigdft": { + "code_home": "https://github.com/BigDFT-group/aiida-bigdft-plugin", + "development_status": "beta", + "entry_point_prefix": "bigdft", + "pip_url": "aiida-bigdft", + "plugin_info": "https://raw.github.com/BigDFT-group/aiida-bigdft-plugin/master/setup.json", + "name": "aiida-bigdft", + "package_name": "aiida_bigdft", + "hosted_on": "github.com", + "metadata": { + "description": "Aiida plugin for BigDFT code", + "author": "The BigDFT Team", + "author_email": "bigdft-developers@lists.launchpad.net", + "license": "MIT", + "home_page": "https://github.com/BigDFT-group/aiida-bigdft-plugin", + "classifiers": [ + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python" + ], + "version": "0.2.6" + }, + "aiida_version": ">=1.1.1,<2.0.0", + "entry_points": { + "aiida.calculations": { + "bigdft": { + "description": [ + "AiiDA calculation plugin wrapping the BigDFT python interface." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "parameters", + "required": true, + "valid_types": "BigDFTParameters", + "info": "Command line parameters for BigDFT" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "StructureData struct" + }, + { + "name": "extra_retrieved_files", + "required": false, + "valid_types": "List", + "info": "" + }, + { + "name": "kpoints", + "required": false, + "valid_types": "Dict", + "info": "kpoint mesh or kpoint path" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "pseudos", + "required": false, + "valid_types": "List", + "info": "" + }, + { + "name": "structurefile", + "required": false, + "valid_types": "Str", + "info": "xyz file" + } + ], + "outputs": [ + { + "name": "bigdft_logfile", + "required": true, + "valid_types": "BigDFTLogfile", + "info": "BigDFT log file as a dict" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." 
+ } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 100, + "message": "Calculation did not produce all expected output files." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + } + ] + }, + "class": "aiida_bigdft.calculations.bigdft:BigDFTCalculation" + }, + "bigdft.postscript": { + "description": [ + "AiiDA calculation to add post treatments to a computation workcahin.", + " post treatment scripts are to be registered as codes in aiida.", + " They are python scripts accepting one argument : a remotefolder where data is stored", + " Output files are not specified and can be added to the extra_retrieved_files list" + ], + "spec": { + "inputs": [ + { + "name": "bigdft_data_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Folder to the BigDFT data folder" + }, + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "retrieved_files", + "required": false, + "valid_types": "List", + "info": "" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 101, + "message": "Script execution failed" + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + } + ] + }, + "class": "aiida_bigdft.calculations.postscript:ScriptCalculation" + } + }, + "aiida.cmdline.data": { + "bigdft": "aiida_bigdft.cli:data_cli" + }, + "aiida.data": { + "bigdft": "aiida_bigdft.data:BigDFTParameters", + "bigdft_logfile": "aiida_bigdft.data:BigDFTLogfile" + }, + "aiida.parsers": { + "bigdft": "aiida_bigdft.parsers:BigDFTParser" + }, + "aiida.workflows": { + "bigdft": { + "description": [ + "No description available" + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." 
+ }, + { + "name": "parameters", + "required": true, + "valid_types": "BigDFTParameters", + "info": "Command line parameters for BigDFT" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "StructureData struct" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "extra_retrieved_files", + "required": false, + "valid_types": "List", + "info": "" + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict", + "info": "Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "Dict", + "info": "kpoint mesh or kpoint path" + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "pseudos", + "required": false, + "valid_types": "List", + "info": "" + }, + { + "name": "run_opts", + "required": false, + "valid_types": "Dict", + "info": "metadata" + }, + { + "name": "show_warnings", + "required": false, + "valid_types": "Bool", + "info": "turn the warnings on/off." + }, + { + "name": "structurefile", + "required": false, + "valid_types": "Str", + "info": "xyz file" + } + ], + "outputs": [ + { + "name": "bigdft_logfile", + "required": true, + "valid_types": "BigDFTLogfile", + "info": "BigDFT log file as a dict" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "BigDFT input error" + }, + { + "status": 200, + "message": "BigDFT runtime error" + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." + } + ] + }, + "class": "aiida_bigdft.workflows.base:BigDFTBaseWorkChain" + }, + "bigdft.relax": { + "description": [ + "Structure relaxation workchain." 
+ ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "relax", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "StructureData struct" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "extra_retrieved_files", + "required": false, + "valid_types": "List", + "info": "" + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict", + "info": "Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "Dict", + "info": "kpoint mesh or kpoint path" + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "parameters", + "required": false, + "valid_types": "BigDFTParameters", + "info": "param dictionary" + }, + { + "name": "pseudos", + "required": false, + "valid_types": "List", + "info": "" + }, + { + "name": "run_opts", + "required": false, + "valid_types": "Dict", + "info": "metadata" + }, + { + "name": "show_warnings", + "required": false, + "valid_types": "Bool", + "info": "turn the warnings on/off." + }, + { + "name": "structurefile", + "required": false, + "valid_types": "Str", + "info": "xyz file" + } + ], + "outputs": [ + { + "name": "bigdft_logfile", + "required": true, + "valid_types": "BigDFTLogfile", + "info": "BigDFT log file as a dict" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "forces", + "required": false, + "valid_types": "ArrayData", + "info": "" + }, + { + "name": "relaxed_structure", + "required": false, + "valid_types": "StructureData", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "total_energy", + "required": false, + "valid_types": "Float", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." 
+ }, + { + "status": 101, + "message": "Subprocess failed for relaxation" + } + ] + }, + "class": "aiida_bigdft.workflows.relax:BigDFTRelaxWorkChain" + } + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 2 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "red", + "text": "Data", + "count": 2 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 2 + }, + { + "colorclass": "orange", + "text": "Other (Data commands)", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-bigdft", + "is_installable": "True" + }, + "aiida-castep": { + "code_home": "https://gitlab.com/bz1/aiida-castep", + "development_status": "stable", + "documentation_url": "https://aiida-castep.readthedocs.io/", + "entry_point_prefix": "castep", + "pip_url": "aiida-castep", + "plugin_info": "https://gitlab.com/bz1/aiida-castep/raw/master/setup.json", + "name": "aiida-castep", + "package_name": "aiida_castep", + "hosted_on": "gitlab.com", + "metadata": { + "description": "AiiDA plugin for CASTEP", + "author": "Bonan Zhu", + "author_email": "zhubonan@outlook.com", + "license": "MIT License", + "home_page": "https://github.com/zhubonan/aiida-castep", + "classifiers": [ + "Framework :: AiiDA", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9" + ], + "version": "2.0.1" + }, + "aiida_version": ">=2.0,<3.0", + "entry_points": { + "aiida.calculations": { + "castep.castep": { + "description": [ + "Class representing a generic CASTEP calculation -", + " This class should work for all types of calculations." + ], + "spec": { + "inputs": [ + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "A node that defines the input parameters" + }, + { + "name": "pseudos", + "required": true, + "valid_types": "", + "info": "Use nodes for the pseudopotentails of one ofthe element in the structure. You should pass aa dictionary specifying the pseudpotential node foreach kind such as {O: }" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "The input structure" + }, + { + "name": "bs_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Extra kpoints input for task: bandstructure" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "elnes_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Extra kpoints input for task: elnes" + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Use a node defining the kpoints for the calculation" + }, + { + "name": "magres_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Extra kpoints input for task: magres" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." 
+ }, + { + "name": "optics_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Extra kpoints input for task: optics" + }, + { + "name": "parent_calc_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Use a remote folder as the parent folder. Useful for restarts." + }, + { + "name": "phonon_fine_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Extra kpoints input for task: phonon, phonon+efield" + }, + { + "name": "phonon_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Extra kpoints input for task: phonon, phonon+efield" + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "A node for additional settings" + }, + { + "name": "spectral_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Extra kpoints input for task: spectral" + }, + { + "name": "supercell_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Extra kpoints input for task: phonon" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "Parsed results in a dictionary format." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 0, + "message": "Calculation terminated gracefully, end found" + }, + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 101, + "message": "SCF Cycles failed to reach convergence" + }, + { + "status": 103, + "message": "Stopped execuation due to detection of 'stop ' keyword in param file." + }, + { + "status": 104, + "message": "CASTEP generate error files. 
Check them for details" + }, + { + "status": 105, + "message": "Cannot find the end of calculation" + }, + { + "status": 106, + "message": "No output .castep files found" + }, + { + "status": 107, + "message": "Calculation self-terminated due to time limit" + }, + { + "status": 108, + "message": "No retrieve folder is found" + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 200, + "message": "UNKOWN ERROR" + }, + { + "status": 501, + "message": "At least one kpoints/spin has no empty bands - please rerun with increased nextra_bands." + } + ] + }, + "class": "aiida_castep.calculations.castep:CastepCalculation" + }, + "castep.ts": { + "description": [ + "CASTEP calculation for transition state search. Use an extra input product structure." + ], + "spec": { + "inputs": [ + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "A node that defines the input parameters" + }, + { + "name": "product_structure", + "required": true, + "valid_types": "StructureData", + "info": "Product structure for transition state search." + }, + { + "name": "pseudos", + "required": true, + "valid_types": "", + "info": "Use nodes for the pseudopotentails of one ofthe element in the structure. You should pass aa dictionary specifying the pseudpotential node foreach kind such as {O: }" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "The input structure" + }, + { + "name": "bs_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Extra kpoints input for task: bandstructure" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "elnes_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Extra kpoints input for task: elnes" + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Use a node defining the kpoints for the calculation" + }, + { + "name": "magres_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Extra kpoints input for task: magres" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "optics_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Extra kpoints input for task: optics" + }, + { + "name": "parent_calc_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Use a remote folder as the parent folder. Useful for restarts." 
+ }, + { + "name": "phonon_fine_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Extra kpoints input for task: phonon, phonon+efield" + }, + { + "name": "phonon_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Extra kpoints input for task: phonon, phonon+efield" + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "A node for additional settings" + }, + { + "name": "spectral_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Extra kpoints input for task: spectral" + }, + { + "name": "supercell_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Extra kpoints input for task: phonon" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "Parsed results in a dictionary format." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 0, + "message": "Calculation terminated gracefully, end found" + }, + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 101, + "message": "SCF Cycles failed to reach convergence" + }, + { + "status": 103, + "message": "Stopped execuation due to detection of 'stop ' keyword in param file." + }, + { + "status": 104, + "message": "CASTEP generate error files. Check them for details" + }, + { + "status": 105, + "message": "Cannot find the end of calculation" + }, + { + "status": 106, + "message": "No output .castep files found" + }, + { + "status": 107, + "message": "Calculation self-terminated due to time limit" + }, + { + "status": 108, + "message": "No retrieve folder is found" + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." 
+ }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 200, + "message": "UNKNOWN ERROR" + }, + { + "status": 501, + "message": "At least one kpoints/spin has no empty bands - please rerun with increased nextra_bands." + } + ] + }, + "class": "aiida_castep.calculations.castep:CastepTSCalculation" + } + }, + "aiida.cmdline.data": { + "castep-helper": "aiida_castep.cmdline.helper_cmd:helper_cmd", + "castep-pseudos": "aiida_castep.cmdline.otfg_cmd:pseudos_cmd" + }, + "aiida.data": { + "castep.otfgdata": "aiida_castep.data.otfg:OTFGData", + "castep.uspdata": "aiida_castep.data.usp:UspData" + }, + "aiida.groups": { + "castep.otfg": "aiida_castep.data.otfg:OTFGGroup" + }, + "aiida.parsers": { + "castep.castep": "aiida_castep.parsers.castep:CastepParser" + }, + "aiida.tests": { + "castep.calculation": "aiida_castep.tests.dbtests.dbtestcalculation" + }, + "aiida.tools.calculations": { + "castep.castep": "aiida_castep.calculations.tools:CastepCalcTools" + }, + "aiida.workflows": { + "castep.altrelax": { + "description": [ + "A relaxation workflow that alternates between variable-cell and fixed-cell relaxation.", + " This is to mitigate the problem in CASTEP where, if the cell is partially constrained,", + " the convergence would be very slow.", + "", + " To overcome this problem, the structure should be relaxed with cell constraints", + " then restarted with fixed cell, and the cycle repeated.", + "", + " The following fields can be used in ``relax_options``:", + "", + " :var_cell_iter_max: Maximum iterations in variable cell relaxation, defaults to 10", + "", + " :fix_cell_iter_max: Maximum iterations in fixed cell relaxation, defaults to 20" + ], + "spec": { + "inputs": [ + { + "name": "base", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "calc", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "Structure to be used for relaxation." + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool, NoneType", + "info": "Whether to clean the workdir of the calculations at the end of the workchain. The default is not performing any cleaning." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "relax_options", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Options for relaxation." + } + ], + "outputs": [ + { + "name": "output_bands", + "required": true, + "valid_types": "BandsData", + "info": "" + }, + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "" + }, + { + "name": "output_array", + "required": false, + "valid_types": "ArrayData", + "info": "" + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "The relaxed structure." + }, + { + "name": "output_trajectory", + "required": false, + "valid_types": "ArrayData", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output."
+ }, + { + "status": 101, + "message": "The launched subprocess failed in the relax stage" + }, + { + "status": 102, + "message": "Geometry optimisation did not converge and the maximum number of iterations was exceeded." + }, + { + "status": 201, + "message": "No cell_constraints found in the input" + } + ] + }, + "class": "aiida_castep.workflows.relax:CastepAlterRelaxWorkChain" + }, + "castep.bands": { + "description": [ + "Workchain for running a bands calculation.", + "", + " This workchain does the following:", + "", + " 1. Relax the structure if requested (e.g. inputs passed to the relax namespace).", + " 2. Optionally: Do an SCF singlepoint calculation", + " 3. Do a combined SCF + non-SCF calculation for bands and dos.", + "", + " Inputs must be passed for the SCF calculation (dispatched to bands and DOS),", + " others are optional.", + "", + " Inputs for bands and dos calculations are optional. However, if they are needed, the full list of inputs must", + " be passed. For the `parameters` node, one may choose to only specify those fields that need to be updated." + ], + "spec": { + "inputs": [ + { + "name": "scf", + "required": true, + "valid_types": "Data", + "info": "Inputs for SCF workchain, mandatory. Used as template for bands/dos if not supplied separately" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "The input structure" + }, + { + "name": "bands", + "required": false, + "valid_types": "Data", + "info": "Inputs for bands calculation, if needed" + }, + { + "name": "bands_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Explicit kpoints for the bands" + }, + { + "name": "bands_kpoints_distance", + "required": false, + "valid_types": "Float, NoneType", + "info": "Spacing for band distances, used by seekpath" + }, + { + "name": "clean_children_workdir", + "required": false, + "valid_types": "Str, NoneType", + "info": "What part of the called children to clean" + }, + { + "name": "dos", + "required": false, + "valid_types": "Data", + "info": "Inputs for DOS calculation, if needed" + }, + { + "name": "dos_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Kpoints for running DOS calculations" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "only_dos", + "required": false, + "valid_types": "", + "info": "Flag for running only DOS calculations" + }, + { + "name": "options", + "required": false, + "valid_types": "", + "info": "Options for this workchain. Supported keywords: dos_smearing, dos_npoints." + }, + { + "name": "relax", + "required": false, + "valid_types": "Data", + "info": "Inputs for Relaxation workchain, if needed" + }, + { + "name": "run_separate_scf", + "required": false, + "valid_types": "", + "info": "Flag for running a separate SCF calculation, defaults to False" + } + ], + "outputs": [ + { + "name": "band_structure", + "required": true, + "valid_types": "", + "info": "Computed band structure with labels" + }, + { + "name": "dos_bands", + "required": false, + "valid_types": "", + "info": "Bands from the DOS calculation" + }, + { + "name": "primitive_structure", + "required": false, + "valid_types": "", + "info": "Primitive structure used for band structure calculations" + }, + { + "name": "seekpath_parameters", + "required": false, + "valid_types": "", + "info": "Parameters used by seekpath" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error."
+ }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 501, + "message": "Relaxation workchain failed" + }, + { + "status": 502, + "message": "SCF workchain failed" + }, + { + "status": 503, + "message": "Band structure workchain failed" + }, + { + "status": 504, + "message": "DOS workchain failed" + } + ] + }, + "class": "aiida_castep.workflows.bands:CastepBandsWorkChain" + }, + "castep.base": { + "description": [ + "A basic workchain for generic CASTEP calculations.", + " We try to handle errors such as walltime exceeded or SCF not converged." + ], + "spec": { + "inputs": [ + { + "name": "calc", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "calc_options", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Options to be passed to the calculation's metadata.options" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool, NoneType", + "info": "Whether to clean the workdir of the calculations or not; the default is not to clean." + }, + { + "name": "continuation_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Use a remote folder as the parent folder. Useful for restarts." + }, + { + "name": "ensure_gamma_centering", + "required": false, + "valid_types": "Bool, NoneType", + "info": "Ensure the kpoint grid is gamma centred." + }, + { + "name": "kpoints_spacing", + "required": false, + "valid_types": "Float, NoneType", + "info": "Kpoint spacing" + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of restarts" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "options", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Options specific to the workchain. Available options: queue_wallclock_limit, use_castep_bin" + }, + { + "name": "pseudos_family", + "required": false, + "valid_types": "Str, NoneType", + "info": "Pseudopotential family to be used" + }, + { + "name": "reuse_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Use a remote folder as the parent folder. Useful for restarts." + } + ], + "outputs": [ + { + "name": "output_bands", + "required": true, + "valid_types": "BandsData", + "info": "" + }, + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "" + }, + { + "name": "output_array", + "required": false, + "valid_types": "ArrayData", + "info": "" + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "" + }, + { + "name": "output_trajectory", + "required": false, + "valid_types": "ArrayData", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output."
+ }, + { + "status": 200, + "message": "The maximum number of iterations has been exceeded" + }, + { + "status": 201, + "message": "The maximum total wallclock time has been exceeded" + }, + { + "status": 301, + "message": "CASTEP generated error files and is not recoverable" + }, + { + "status": 302, + "message": "Cannot reach SCF convergence despite restart efforts" + }, + { + "status": 400, + "message": "The stop flag has been put in the .param file to request termination of the calculation." + }, + { + "status": 900, + "message": "Input validation failed" + }, + { + "status": 901, + "message": "Completed one iteration but no calculation was returned" + }, + { + "status": 1000, + "message": "Error is not known" + } + ] + }, + "class": "aiida_castep.workflows.base:CastepBaseWorkChain" + }, + "castep.relax": { + "description": [ + "WorkChain to relax structures.", + " Restart the relaxation calculation until the structure is fully relaxed.", + " Each CASTEP relaxation may finish without error but with a structure that is not fully relaxed", + " if the number of iterations is exceeded (*geom_max_iter*).", + " This workchain tries to restart such calculations (wrapped in CastepBaseWorkChain)", + " until the structure is fully relaxed.", + "", + " ``relax_options`` is a Dict of options; the available fields are:", + "", + " - restart_mode: mode of restart, choose from ``reuse`` (default), ``structure``,", + " ``continuation``.", + " - bypass: Bypass relaxation control - e.g. no checking of the convergence.", + " Can be used for doing a singlepoint calculation." + ], + "spec": { + "inputs": [ + { + "name": "base", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "calc", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "Structure to be used for relaxation." + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool, NoneType", + "info": "Whether to clean the workdir of the calculations at the end of the workchain. The default is not performing any cleaning." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "relax_options", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Options for relaxation." + } + ], + "outputs": [ + { + "name": "output_bands", + "required": true, + "valid_types": "BandsData", + "info": "" + }, + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "" + }, + { + "name": "output_array", + "required": false, + "valid_types": "ArrayData", + "info": "" + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "The relaxed structure." + }, + { + "name": "output_trajectory", + "required": false, + "valid_types": "ArrayData", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output."
+ }, + { + "status": 101, + "message": "The launched subprocess failed in the relax stage" + }, + { + "status": 102, + "message": "Geometry optimisation did not converge and the maximum number of iterations was exceeded." + } + ] + }, + "class": "aiida_castep.workflows.relax:CastepRelaxWorkChain" + } + }, + "console_scripts": { + "castep.mock": "aiida_castep.cmdline.mock_castep:mock_castep" + } + }, + "commits_count": 10, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 2 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "red", + "text": "Data", + "count": 2 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 4 + }, + { + "colorclass": "purple", + "text": "Console scripts", + "count": 1 + }, + { + "colorclass": "orange", + "text": "Other (Data commands, Groups, Tests, ...)", + "count": 5 + } + ], + "pip_install_cmd": "pip install aiida-castep", + "is_installable": "True" + }, + "aiida-catmap": { + "code_home": "https://github.com/sudarshanv01/aiida-catmap", + "entry_point_prefix": "catmap", + "name": "aiida-catmap", + "package_name": "aiida_catmap", + "hosted_on": "github.com", + "metadata": { + "author": "Sudarshan Vijay", + "author_email": "vijays@fysik.dtu.dk", + "version": "0.2.0a0", + "description": "AiiDA package that interfaces with Kinetic modelling code CatMAP", + "classifiers": [ + "Programming Language :: Python", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Framework :: AiiDA" + ] + }, + "aiida_version": ">=1.1.0,<2.0.0", + "entry_points": { + "aiida.calculations": { + "catmap": "aiida_catmap.calculations.catmap:CatMAPCalculation" + }, + "aiida.parsers": { + "catmap": "aiida_catmap.parsers.catmap:CatMAPParser" + } + }, + "commits_count": 0, + "development_status": "planning", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + } + ], + "pip_install_cmd": "See source code repository."
+ }, + "aiida-catmat": { + "code_home": "https://github.com/pzarabadip/aiida-catmat", + "entry_point_prefix": "catmat", + "development_status": "beta", + "documentation_url": "https://aiida-catmat.readthedocs.io/", + "pip_url": "aiida-catmat", + "name": "aiida-catmat", + "package_name": "aiida_catmat", + "hosted_on": "github.com", + "metadata": { + "description": "Collection of AiiDA WorkChains Developed in Morgan Group", + "author": "Pezhman Zarabadi-Poor", + "author_email": "pzarabadip@gmail.com", + "license": "MIT License", + "home_page": "https://github.com/pzarabadip/aiida-catmat", + "classifiers": [ + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9" + ], + "version": "1.0.0b0" + }, + "aiida_version": null, + "entry_points": { + "aiida.parsers": { + "vasp_base_parser": "aiida_catmat.parsers:VaspBaseParser" + }, + "aiida.workflows": { + "vasp.base": "aiida_catmat.workchains:VaspBaseWorkChain", + "catmat.vasp_multistage": "aiida_catmat.workchains:VaspMultiStageWorkChain", + "catmat.vasp_converge": "aiida_catmat.workchains:VaspConvergeWorkChain", + "catmat.vasp_catmat": "aiida_catmat.workchains:VaspCatMatWorkChain", + "catmat.vasp_multistage_ddec": "aiida_catmat.workchains:VaspMultiStageDdecWorkChain" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 5 + } + ], + "pip_install_cmd": "pip install --pre aiida-catmat", + "is_installable": "False" + }, + "aiida-ce": { + "code_home": "https://github.com/unkcpz/aiida-ce", + "development_status": "beta", + "entry_point_prefix": "ce", + "pip_url": "git+https://github.com/unkcpz/aiida-ce", + "name": "aiida-ce", + "package_name": "aiida_ce", + "hosted_on": "github.com", + "metadata": { + "author": "unkcpz", + "author_email": "morty.yu@yahoo.com", + "version": "0.1.0a0", + "description": "AiiDA plugin for running cluster expansion using icet.", + "classifiers": [ + "Programming Language :: Python", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Framework :: AiiDA" + ] + }, + "aiida_version": ">=1.0.0,<2.0.0", + "entry_points": { + "aiida.data": { + "ce": "aiida_ce.data:DiffParameters", + "ce.structures": "aiida_ce.data.structure_set:StructureSet", + "ce.cluster": "aiida_ce.data.cluster:ClusterSpaceData" + }, + "aiida.calculations": { + "ce.genenum": "aiida_ce.calculations.genenum:EnumCalculation", + "ce.gensqs": "aiida_ce.calculations.gensqs:SqsCalculation", + "ce.train": "aiida_ce.calculations.train:TrainCalculation" + }, + "aiida.parsers": { + "ce.genenum": "aiida_ce.parsers.genenum:EnumParser", + "ce.gensqs": "aiida_ce.parsers.gensqs:SqsParser", + "ce.train": "aiida_ce.parsers.train:TrainParser" + }, + "aiida.cmdline.data": { + "ce": "aiida_ce.cli:data_cli" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 3 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 3 + }, + { + "colorclass": "red", + "text": "Data", + "count": 3 + }, + { + "colorclass": "orange", + "text": "Other (Data commands)", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/unkcpz/aiida-ce", + "is_installable": "True" + }, + "aiida-champ": { + "code_home": "https://github.com/TREX-CoE/aiida-champ", + "development_status": "beta", + "documentation_url": "http://aiida-champ.readthedocs.io/", + "entry_point_prefix": "champ", + "pip_url": 
"aiida-champ", + "name": "aiida-champ", + "package_name": "aiida_champ", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin that wraps the vmc executable of CHAMP code for computing the total energy and much more stuff.", + "author": "Ravindra Shinde", + "author_email": "r.l.shinde@utwente.nl", + "license": "MIT", + "home_page": "https://github.com/neelravi/aiida-champ", + "classifiers": [ + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python" + ], + "version": "1.2.6" + }, + "aiida_version": null, + "entry_points": { + "aiida.data": { + "CHAMP": "aiida_champ.data:CHAMPParameters" + }, + "aiida.calculations": { + "CHAMP": { + "description": [ + "AiiDA calculation plugin wrapping the CHAMP's vmc executable.", + "", + " aiida-champ can be used to manage the workflow of a vmc/dmc calculation of the CHAMP code.", + "", + " Author :: Ravindra Shinde", + " Email :: r.l.shinde@utwente.nl" + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "determinants", + "required": true, + "valid_types": "SinglefileData", + "info": "Input determinants file" + }, + { + "name": "filemain", + "required": true, + "valid_types": "SinglefileData", + "info": "Input File" + }, + { + "name": "molecule", + "required": true, + "valid_types": "SinglefileData", + "info": "Molecule structure File" + }, + { + "name": "ecp1", + "required": false, + "valid_types": "SinglefileData", + "info": "Input ECP file for atom type 1" + }, + { + "name": "ecp2", + "required": false, + "valid_types": "SinglefileData", + "info": "Input ECP file for atom type 2" + }, + { + "name": "jastrow", + "required": false, + "valid_types": "SinglefileData", + "info": "Input jastrow file" + }, + { + "name": "jastrowder", + "required": false, + "valid_types": "SinglefileData", + "info": "Input jastrowder file" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "numericalbasis1", + "required": false, + "valid_types": "SinglefileData", + "info": "Input numerical basis file atom 1" + }, + { + "name": "numericalbasis2", + "required": false, + "valid_types": "SinglefileData", + "info": "Input numerical basis file atom 2" + }, + { + "name": "numericalbasisinfo", + "required": false, + "valid_types": "SinglefileData", + "info": "Input numerical basis information file" + }, + { + "name": "orbitals", + "required": false, + "valid_types": "SinglefileData", + "info": "Input orbitals file" + }, + { + "name": "symmetry", + "required": false, + "valid_types": "SinglefileData", + "info": "Input symmetry file" + }, + { + "name": "trexio", + "required": false, + "valid_types": "SinglefileData", + "info": "Input trexio hdf5 file" + } + ], + "outputs": [ + { + "name": "Output", + "required": true, + "valid_types": "SinglefileData", + "info": "Output file of the VMC/DMC calculation" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." 
+ }, + { + "name": "Energy", + "required": false, + "valid_types": "Float", + "info": "Output total energy of the VMC/DMC calculation" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 300, + "message": "Calculation did not produce all expected output files." + } + ] + }, + "class": "aiida_champ.calculations:CHAMPCalculation" + } + }, + "aiida.parsers": { + "CHAMP": "aiida_champ.parsers:CHAMPParser" + }, + "aiida.cmdline.data": { + "CHAMP": "aiida_champ.cli:data_cli" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "red", + "text": "Data", + "count": 1 + }, + { + "colorclass": "orange", + "text": "Other (Data commands)", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-champ", + "is_installable": "True" + }, + "aiida-codtools": { + "code_home": "https://github.com/aiidateam/aiida-codtools", + "documentation_url": "https://aiida-codtools.readthedocs.io/", + "entry_point_prefix": "codtools", + "pip_url": "aiida-codtools", + "plugin_info": "https://raw.githubusercontent.com/aiidateam/aiida-codtools/master/setup.json", + "name": "aiida-codtools", + "package_name": "aiida_codtools", + "hosted_on": "github.com", + "metadata": { + "description": "The Official AiiDA plugin for the cod-tools package.", + "author_email": "The AiiDA team ", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9" + ], + "version": "3.1.0" + }, + "aiida_version": ">=2.1,<3.0", + "entry_points": { + "aiida.calculations": { + "codtools.cif_base": { + "description": [ + "Generic `CalcJob` implementation that can easily be extended to work with any of the `cod-tools` scripts." + ], + "spec": { + "inputs": [ + { + "name": "cif", + "required": true, + "valid_types": "CifData", + "info": "The CIF to be processed." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." 
+ }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Command line parameters." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "messages", + "required": false, + "valid_types": "Dict", + "info": "Warning and error messages returned by script." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Neither the output for the error file could be read from the retrieved folder." + }, + { + "status": 311, + "message": "The output file could not be read from the retrieved folder." + }, + { + "status": 312, + "message": "The error file could not be read from the retrieved folder." + }, + { + "status": 313, + "message": "The output file is empty." + }, + { + "status": 320, + "message": "Invalid command line option passed." + }, + { + "status": 400, + "message": "The output file could not be parsed." + }, + { + "status": 410, + "message": "The output file could not be parsed into a CifData object." + } + ] + }, + "class": "aiida_codtools.calculations.cif_base:CifBaseCalculation" + }, + "codtools.cif_cell_contents": { + "description": [ + "CalcJob plugin for the `cif_cell_contents` script of the `cod-tools` package." + ], + "spec": { + "inputs": [ + { + "name": "cif", + "required": true, + "valid_types": "CifData", + "info": "The CIF to be processed." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. 
This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Command line parameters." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "formulae", + "required": true, + "valid_types": "Dict", + "info": "A dictionary of formulae present in the CIF." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "messages", + "required": false, + "valid_types": "Dict", + "info": "Warning and error messages returned by script." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Neither the output for the error file could be read from the retrieved folder." + }, + { + "status": 311, + "message": "The output file could not be read from the retrieved folder." + }, + { + "status": 312, + "message": "The error file could not be read from the retrieved folder." + }, + { + "status": 313, + "message": "The output file is empty." + }, + { + "status": 320, + "message": "Invalid command line option passed." + }, + { + "status": 400, + "message": "The output file could not be parsed." 
+ }, + { + "status": 410, + "message": "The output file could not be parsed into a CifData object." + } + ] + }, + "class": "aiida_codtools.calculations.cif_cell_contents:CifCellContentsCalculation" + }, + "codtools.cif_cod_check": { + "description": [ + "CalcJob plugin for the `cif_cod_check` script of the `cod-tools` package." + ], + "spec": { + "inputs": [ + { + "name": "cif", + "required": true, + "valid_types": "CifData", + "info": "The CIF to be processed." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Command line parameters." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "messages", + "required": true, + "valid_types": "Dict", + "info": "Warning and error messages returned by the script." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Neither the output for the error file could be read from the retrieved folder." 
+ }, + { + "status": 311, + "message": "The output file could not be read from the retrieved folder." + }, + { + "status": 312, + "message": "The error file could not be read from the retrieved folder." + }, + { + "status": 313, + "message": "The output file is empty." + }, + { + "status": 320, + "message": "Invalid command line option passed." + }, + { + "status": 400, + "message": "The output file could not be parsed." + }, + { + "status": 410, + "message": "The output file could not be parsed into a CifData object." + } + ] + }, + "class": "aiida_codtools.calculations.cif_cod_check:CifCodCheckCalculation" + }, + "codtools.cif_cod_deposit": { + "description": [ + "CalcJob plugin for the `cif_cod_deposit` script of the `cod-tools` package." + ], + "spec": { + "inputs": [ + { + "name": "cif", + "required": true, + "valid_types": "CifData", + "info": "The CIF to be processed." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Command line parameters." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "messages", + "required": false, + "valid_types": "Dict", + "info": "Warning and error messages returned by script." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." 
+ }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Neither the output for the error file could be read from the retrieved folder." + }, + { + "status": 300, + "message": "The deposition failed for unknown reasons." + }, + { + "status": 310, + "message": "The deposition failed because the input was invalid." + }, + { + "status": 311, + "message": "The output file could not be read from the retrieved folder." + }, + { + "status": 312, + "message": "The error file could not be read from the retrieved folder." + }, + { + "status": 313, + "message": "The output file is empty." + }, + { + "status": 320, + "message": "Invalid command line option passed." + }, + { + "status": 400, + "message": "The output file could not be parsed." + }, + { + "status": 410, + "message": "The output file could not be parsed into a CifData object." + }, + { + "status": 410, + "message": "The deposition failed because one or more CIFs already exist in the COD." + }, + { + "status": 420, + "message": "The structure is unchanged and so deposition is unnecessary." + } + ] + }, + "class": "aiida_codtools.calculations.cif_cod_deposit:CifCodDepositCalculation" + }, + "codtools.cif_cod_numbers": { + "description": [ + "CalcJob plugin for the `cif_cod_numbers` script of the `cod-tools` package." + ], + "spec": { + "inputs": [ + { + "name": "cif", + "required": true, + "valid_types": "CifData", + "info": "The CIF to be processed." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Command line parameters." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "numbers", + "required": true, + "valid_types": "Dict", + "info": "Mapping of COD IDs found with their formula and count." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "messages", + "required": false, + "valid_types": "Dict", + "info": "Warning and error messages returned by script." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Neither the output for the error file could be read from the retrieved folder." + }, + { + "status": 311, + "message": "The output file could not be read from the retrieved folder." + }, + { + "status": 312, + "message": "The error file could not be read from the retrieved folder." + }, + { + "status": 313, + "message": "The output file is empty." + }, + { + "status": 320, + "message": "Invalid command line option passed." + }, + { + "status": 400, + "message": "The output file could not be parsed." + }, + { + "status": 410, + "message": "The output file could not be parsed into a CifData object." + } + ] + }, + "class": "aiida_codtools.calculations.cif_cod_numbers:CifCodNumbersCalculation" + }, + "codtools.cif_filter": { + "description": [ + "CalcJob plugin for the `cif_filter` script of the `cod-tools` package." + ], + "spec": { + "inputs": [ + { + "name": "cif", + "required": true, + "valid_types": "CifData", + "info": "The CIF to be processed." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Command line parameters." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "cif", + "required": true, + "valid_types": "CifData", + "info": "The CIF produced by the script." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "messages", + "required": false, + "valid_types": "Dict", + "info": "Warning and error messages returned by script." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Neither the output for the error file could be read from the retrieved folder." + }, + { + "status": 311, + "message": "The output file could not be read from the retrieved folder." + }, + { + "status": 312, + "message": "The error file could not be read from the retrieved folder." + }, + { + "status": 313, + "message": "The output file is empty." + }, + { + "status": 320, + "message": "Invalid command line option passed." + }, + { + "status": 400, + "message": "The output file could not be parsed." + }, + { + "status": 410, + "message": "The output file could not be parsed into a CifData object." + } + ] + }, + "class": "aiida_codtools.calculations.cif_filter:CifFilterCalculation" + }, + "codtools.cif_select": { + "description": [ + "CalcJob plugin for the `cif_select` script of the `cod-tools` package." + ], + "spec": { + "inputs": [ + { + "name": "cif", + "required": true, + "valid_types": "CifData", + "info": "The CIF to be processed." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Command line parameters." 
+ }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "cif", + "required": true, + "valid_types": "CifData", + "info": "The CIF produced by the script." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "messages", + "required": false, + "valid_types": "Dict", + "info": "Warning and error messages returned by script." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Neither the output for the error file could be read from the retrieved folder." + }, + { + "status": 311, + "message": "The output file could not be read from the retrieved folder." + }, + { + "status": 312, + "message": "The error file could not be read from the retrieved folder." + }, + { + "status": 313, + "message": "The output file is empty." + }, + { + "status": 320, + "message": "Invalid command line option passed." + }, + { + "status": 400, + "message": "The output file could not be parsed." + }, + { + "status": 410, + "message": "The output file could not be parsed into a CifData object." + } + ] + }, + "class": "aiida_codtools.calculations.cif_select:CifSelectCalculation" + }, + "codtools.cif_split_primitive": { + "description": [ + "CalcJob plugin for the `cif_split_primitive` script of the `cod-tools` package." + ], + "spec": { + "inputs": [ + { + "name": "cif", + "required": true, + "valid_types": "CifData", + "info": "The CIF to be processed." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. 
This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Command line parameters." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "cifs", + "required": true, + "valid_types": "CifData", + "info": "The CIFs produced by the script." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "messages", + "required": false, + "valid_types": "Dict", + "info": "Warning and error messages returned by script." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Neither the output for the error file could be read from the retrieved folder." + }, + { + "status": 311, + "message": "The output file could not be read from the retrieved folder." + }, + { + "status": 312, + "message": "The error file could not be read from the retrieved folder." + }, + { + "status": 313, + "message": "The output file is empty." + }, + { + "status": 320, + "message": "Invalid command line option passed." + }, + { + "status": 400, + "message": "The output file could not be parsed." 
+ }, + { + "status": 410, + "message": "The output file could not be parsed into a CifData object." + } + ] + }, + "class": "aiida_codtools.calculations.cif_split_primitive:CifSplitPrimitiveCalculation" + }, + "codtools.primitive_structure_from_cif": { + "description": [ + "Attempt to parse the given `CifData` and create a `StructureData` from it.", + "", + " First the raw CIF file is parsed with the given `parse_engine`. The resulting `StructureData` is then passed through", + " SeeKpath to try and get the primitive cell. If that is successful, important structural parameters as determined by", + " SeeKpath will be set as extras on the structure node which is then returned as output.", + "", + " :param cif: the `CifData` node", + " :param parse_engine: the parsing engine, supported libraries 'ase' and 'pymatgen'", + " :param symprec: a `Float` node with symmetry precision for determining primitive cell in SeeKpath", + " :param site_tolerance: a `Float` node with the fractional coordinate distance tolerance for finding overlapping", + " sites. This will only be used if the parse_engine is pymatgen", + " :return: the primitive `StructureData` as determined by SeeKpath" + ], + "spec": { + "inputs": [ + { + "name": "cif", + "required": true, + "valid_types": "Data", + "info": "the `CifData` node" + }, + { + "name": "parse_engine", + "required": true, + "valid_types": "Data", + "info": "the parsing engine, supported libraries 'ase' and 'pymatgen'" + }, + { + "name": "site_tolerance", + "required": true, + "valid_types": "Data", + "info": "a `Float` node with the fractional coordinate distance tolerance for finding overlapping\nsites. This will only be used if the parse_engine is pymatgen" + }, + { + "name": "symprec", + "required": true, + "valid_types": "Data", + "info": "a `Float` node with symmetry precision for determining primitive cell in SeeKpath" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." 
+ } + ] + }, + "class": "aiida_codtools.calculations.functions.primitive_structure_from_cif:primitive_structure_from_cif" + } + }, + "aiida.parsers": { + "codtools.cif_base": "aiida_codtools.parsers.cif_base:CifBaseParser", + "codtools.cif_cell_contents": "aiida_codtools.parsers.cif_cell_contents:CifCellContentsParser", + "codtools.cif_cod_check": "aiida_codtools.parsers.cif_cod_check:CifCodCheckParser", + "codtools.cif_cod_deposit": "aiida_codtools.parsers.cif_cod_deposit:CifCodDepositParser", + "codtools.cif_cod_numbers": "aiida_codtools.parsers.cif_cod_numbers:CifCodNumbersParser", + "codtools.cif_split_primitive": "aiida_codtools.parsers.cif_split_primitive:CifSplitPrimitiveParser" + }, + "aiida.workflows": { + "codtools.cif_clean": { + "description": [ + "WorkChain to clean a `CifData` node using the `cif_filter` and `cif_select` scripts of `cod-tools`.", + "", + " It will first run `cif_filter` to correct syntax errors, followed by `cif_select` which will canonicalize the tags.", + " If a group is passed for the `group_structure` input, the atomic structure library defined by the `engine` input", + " will be used to parse the final cleaned `CifData` to construct a `StructureData` object, which will then be passed", + " to the `SeeKpath` library to analyze it and return the primitive structure" + ], + "spec": { + "inputs": [ + { + "name": "cif", + "required": true, + "valid_types": "CifData", + "info": "The CifData node that is to be cleaned." + }, + { + "name": "cif_filter", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "cif_select", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "group_cif", + "required": false, + "valid_types": "Group, NoneType", + "info": "An optional Group to which the final cleaned CifData node will be added." + }, + { + "name": "group_structure", + "required": false, + "valid_types": "Group, NoneType", + "info": "An optional Group to which the final reduced StructureData node will be added." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "parse_engine", + "required": false, + "valid_types": "Str", + "info": "The atomic structure engine to parse the cif and create the structure." + }, + { + "name": "site_tolerance", + "required": false, + "valid_types": "Float", + "info": "The fractional coordinate distance tolerance for finding overlapping sites (pymatgen only)." + }, + { + "name": "symprec", + "required": false, + "valid_types": "Float", + "info": "The symmetry precision used by SeeKpath for crystal symmetry refinement." + } + ], + "outputs": [ + { + "name": "cif", + "required": true, + "valid_types": "CifData", + "info": "The cleaned CifData node." + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "The primitive cell structure created with SeeKpath from the cleaned CifData." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 401, + "message": "The CifFilterCalculation step failed." + }, + { + "status": 402, + "message": "The CifSelectCalculation step failed." + }, + { + "status": 410, + "message": "The cleaned CifData contains sites with unknown species." 
+ }, + { + "status": 411, + "message": "The cleaned CifData defines no atomic sites." + }, + { + "status": 412, + "message": "The cleaned CifData defines sites with attached hydrogens with incomplete positional data." + }, + { + "status": 413, + "message": "The cleaned CifData defines sites with invalid atomic occupancies." + }, + { + "status": 414, + "message": "Failed to parse a StructureData from the cleaned CifData." + }, + { + "status": 420, + "message": "SeeKpath failed to determine the primitive structure." + }, + { + "status": 421, + "message": "SeeKpath detected inconsistent symmetry operations." + } + ] + }, + "class": "aiida_codtools.workflows.cif_clean:CifCleanWorkChain" + } + }, + "console_scripts": { + "aiida-codtools": "aiida_codtools.cli:cmd_root" + } + }, + "commits_count": 4, + "development_status": "stable", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 9 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 6 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + }, + { + "colorclass": "purple", + "text": "Console scripts", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-codtools", + "is_installable": "True" + }, + "aiida-core": { + "code_home": "https://github.com/aiidateam/aiida-core", + "development_status": "stable", + "documentation_url": "https://aiida-core.readthedocs.io/", + "entry_point_prefix": "", + "package_name": "aiida", + "pip_url": "aiida-core", + "plugin_info": "https://raw.githubusercontent.com/aiidateam/aiida-core/master/setup.json", + "name": "aiida-core", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA is a workflow manager for computational science with a strong focus on provenance, performance and extensibility.", + "author_email": "The AiiDA team ", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering" + ], + "version": "2.4.0" + }, + "aiida_version": "==2.4.0", + "entry_points": { + "aiida.calculations": { + "core.arithmetic.add": { + "description": [ + "`CalcJob` implementation to add two numbers using bash for testing and demonstration purposes." + ], + "spec": { + "inputs": [ + { + "name": "x", + "required": true, + "valid_types": "Int, Float", + "info": "The left operand." + }, + { + "name": "y", + "required": true, + "valid_types": "Int, Float", + "info": "The right operand." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. 
The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "sum", + "required": true, + "valid_types": "Int, Float", + "info": "The sum of the left and right operand." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 310, + "message": "The output file could not be read." + }, + { + "status": 320, + "message": "The output file contains invalid output." + }, + { + "status": 410, + "message": "The sum of the operands is a negative number." + } + ] + }, + "class": "aiida.calculations.arithmetic.add:ArithmeticAddCalculation" + }, + "core.templatereplacer": { + "description": [ + "Simple stub of a plugin that can be used to replace some text in a given template.", + " Can be used for many different codes, or as a starting point to develop a new plugin.", + "", + " This simple plugin takes two node inputs, both of type Dict, with the labels", + " 'parameters' and 'template'", + "", + " You can also add other SinglefileData nodes as input, that will be copied according to", + " what is written in 'template' (see below).", + "", + " * parameters: a set of parameters that will be used for substitution.", + "", + " * template: can contain the following parameters:", + "", + " * input_file_template: a string with substitutions to be managed with the format()", + " function of python, i.e. if you want to substitute a variable called 'varname', you write", + " {varname} in the text. See http://www.python.org/dev/peps/pep-3101/ for more", + " details. The replaced file will be the input file.", + "", + " * input_file_name: a string with the file name for the input. If it is not provided, no", + " file will be created.", + "", + " * output_file_name: a string with the file name for the output. 
If it is not provided, no", + " redirection will be done and the output will go in the scheduler output file.", + "", + " * cmdline_params: a list of strings, to be passed as command line parameters.", + " Each one is substituted with the same rule of input_file_template. Optional", + "", + " * input_through_stdin: if True, the input file name is passed via stdin. Default is False if missing.", + "", + " * files_to_copy: if defined, a list of tuple pairs, with format ('link_name', 'dest_rel_path');", + " for each tuple, an input link to this calculation is looked for, with link labeled 'link_label',", + " and with file type 'Singlefile', and the content is copied to a remote file named 'dest_rel_path'", + " Errors are raised in the input links are non-existent, or of the wrong type, or if there are", + " unused input files.", + "", + " * retrieve_temporary_files: a list of relative filepaths, that if defined, will be retrieved and", + " temporarily stored in an unstored FolderData node that will be available during the", + " Parser.parser_with_retrieved call under the key specified by the Parser.retrieved_temporary_folder key" + ], + "spec": { + "inputs": [ + { + "name": "template", + "required": true, + "valid_types": "Dict", + "info": "A template for the input file." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "files", + "required": false, + "valid_types": "RemoteData, SinglefileData", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Parameters used to replace placeholders in the template." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." 
+ } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 301, + "message": "The temporary retrieved folder data node could not be accessed." + }, + { + "status": 305, + "message": "The `template` input node did not specify the key `output_file_name`." + }, + { + "status": 310, + "message": "The output file could not be read from the retrieved folder." + }, + { + "status": 311, + "message": "A temporary retrieved file could not be read from the temporary retrieved folder." + }, + { + "status": 320, + "message": "The output file contains invalid output." + } + ] + }, + "class": "aiida.calculations.templatereplacer:TemplatereplacerCalculation" + }, + "core.transfer": { + "description": [ + "Utility to copy files from different FolderData and RemoteData nodes into a single place.", + "", + " The final destination for these files can be either the local repository (by creating a", + " new FolderData node to store them) or in the remote computer (by leaving the files in a", + " new remote folder saved in a RemoteData node).", + "", + " Only files from the local computer and from remote folders in the same external computer", + " can be moved at the same time with a single instance of this CalcJob.", + "", + " The user needs to provide three inputs:", + "", + " * ``instructions``: a dict node specifying which files to copy from which nodes.", + " * ``source_nodes``: a dict of nodes, each with a unique identifier label as its key.", + " * ``metadata.computer``: the computer that contains the remote files and will contain", + " the final RemoteData node.", + "", + " The ``instructions`` dict must have the ``retrieve_files`` flag. The CalcJob will create a", + " new folder in the remote machine (``RemoteData``) and put all the files there and will either:", + "", + " (1) leave them there (``retrieve_files = False``) or ...", + " (2) retrieve all the files and store them locally in a ``FolderData`` (``retrieve_files = True``)", + "", + " The `instructions` dict must also contain at least one list with specifications of which files", + " to copy and from where. All these lists take tuples of 3 that have the following format:", + "", + " .. code-block:: python", + "", + " ( source_node_key, path_to_file_in_source, path_to_file_in_target)", + "", + " where the ``source_node_key`` has to be the respective one used when providing the node in the", + " ``source_nodes`` input nodes dictionary.", + "", + "", + " The two main lists to include are ``local_files`` (for files to be taken from FolderData nodes)", + " and ``remote_files`` (for files to be taken from RemoteData nodes). 
Alternatively, files inside", + " of RemoteData nodes can instead be put in the ``symlink_files`` list: the only difference is that", + " files from the first list will be fully copied in the target RemoteData folder, whereas for the", + " files in second list only a symlink to the original file will be created there. This will only", + " affect the content of the final RemoteData target folder, but in both cases the full file will", + " be copied back in the local target FolderData (if ``retrieve_files = True``)." + ], + "spec": { + "inputs": [ + { + "name": "instructions", + "required": true, + "valid_types": "Dict", + "info": "A dictionary containing the `retrieve_files` flag and at least one of the file lists:`local_files`, `remote_files` and/or `symlink_files`." + }, + { + "name": "source_nodes", + "required": true, + "valid_types": "FolderData, RemoteData", + "info": "All the nodes that contain files referenced in the instructions." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." 
+ }, + { + "status": 150, + "message": "{message}" + } + ] + }, + "class": "aiida.calculations.transfer:TransferCalculation" + } + }, + "aiida.calculations.importers": { + "core.arithmetic.add": "aiida.calculations.importers.arithmetic.add:ArithmeticAddCalculationImporter" + }, + "aiida.calculations.monitors": { + "core.always_kill": "aiida.calculations.monitors.base:always_kill" + }, + "aiida.cmdline.computer.configure": { + "core.local": "aiida.transports.plugins.local:CONFIGURE_LOCAL_CMD", + "core.ssh": "aiida.transports.plugins.ssh:CONFIGURE_SSH_CMD" + }, + "aiida.cmdline.data": { + "core.array": "aiida.cmdline.commands.cmd_data.cmd_array:array", + "core.bands": "aiida.cmdline.commands.cmd_data.cmd_bands:bands", + "core.cif": "aiida.cmdline.commands.cmd_data.cmd_cif:cif", + "core.dict": "aiida.cmdline.commands.cmd_data.cmd_dict:dictionary", + "core.remote": "aiida.cmdline.commands.cmd_data.cmd_remote:remote", + "core.singlefile": "aiida.cmdline.commands.cmd_data.cmd_singlefile:singlefile", + "core.structure": "aiida.cmdline.commands.cmd_data.cmd_structure:structure", + "core.trajectory": "aiida.cmdline.commands.cmd_data.cmd_trajectory:trajectory", + "core.upf": "aiida.cmdline.commands.cmd_data.cmd_upf:upf" + }, + "aiida.cmdline.data.structure.import": {}, + "aiida.data": { + "core.array": "aiida.orm.nodes.data.array.array:ArrayData", + "core.array.bands": "aiida.orm.nodes.data.array.bands:BandsData", + "core.array.kpoints": "aiida.orm.nodes.data.array.kpoints:KpointsData", + "core.array.projection": "aiida.orm.nodes.data.array.projection:ProjectionData", + "core.array.trajectory": "aiida.orm.nodes.data.array.trajectory:TrajectoryData", + "core.array.xy": "aiida.orm.nodes.data.array.xy:XyData", + "core.base": "aiida.orm.nodes.data:BaseType", + "core.bool": "aiida.orm.nodes.data.bool:Bool", + "core.cif": "aiida.orm.nodes.data.cif:CifData", + "core.code": "aiida.orm.nodes.data.code.legacy:Code", + "core.code.containerized": "aiida.orm.nodes.data.code.containerized:ContainerizedCode", + "core.code.installed": "aiida.orm.nodes.data.code.installed:InstalledCode", + "core.code.portable": "aiida.orm.nodes.data.code.portable:PortableCode", + "core.dict": "aiida.orm.nodes.data.dict:Dict", + "core.enum": "aiida.orm.nodes.data.enum:EnumData", + "core.float": "aiida.orm.nodes.data.float:Float", + "core.folder": "aiida.orm.nodes.data.folder:FolderData", + "core.int": "aiida.orm.nodes.data.int:Int", + "core.jsonable": "aiida.orm.nodes.data.jsonable:JsonableData", + "core.list": "aiida.orm.nodes.data.list:List", + "core.numeric": "aiida.orm.nodes.data.numeric:NumericType", + "core.orbital": "aiida.orm.nodes.data.orbital:OrbitalData", + "core.remote": "aiida.orm.nodes.data.remote.base:RemoteData", + "core.remote.stash": "aiida.orm.nodes.data.remote.stash.base:RemoteStashData", + "core.remote.stash.folder": "aiida.orm.nodes.data.remote.stash.folder:RemoteStashFolderData", + "core.singlefile": "aiida.orm.nodes.data.singlefile:SinglefileData", + "core.str": "aiida.orm.nodes.data.str:Str", + "core.structure": "aiida.orm.nodes.data.structure:StructureData", + "core.upf": "aiida.orm.nodes.data.upf:UpfData" + }, + "aiida.groups": { + "core": "aiida.orm.groups:Group", + "core.auto": "aiida.orm.groups:AutoGroup", + "core.import": "aiida.orm.groups:ImportGroup", + "core.upf": "aiida.orm.groups:UpfFamily" + }, + "aiida.node": { + "data": "aiida.orm.nodes.data.data:Data", + "process": "aiida.orm.nodes.process.process:ProcessNode", + "process.calculation": 
"aiida.orm.nodes.process.calculation.calculation:CalculationNode", + "process.calculation.calcfunction": "aiida.orm.nodes.process.calculation.calcfunction:CalcFunctionNode", + "process.calculation.calcjob": "aiida.orm.nodes.process.calculation.calcjob:CalcJobNode", + "process.workflow": "aiida.orm.nodes.process.workflow.workflow:WorkflowNode", + "process.workflow.workchain": "aiida.orm.nodes.process.workflow.workchain:WorkChainNode", + "process.workflow.workfunction": "aiida.orm.nodes.process.workflow.workfunction:WorkFunctionNode" + }, + "aiida.parsers": { + "core.arithmetic.add": "aiida.parsers.plugins.arithmetic.add:ArithmeticAddParser", + "core.templatereplacer": "aiida.parsers.plugins.templatereplacer.parser:TemplatereplacerParser" + }, + "aiida.schedulers": { + "core.direct": "aiida.schedulers.plugins.direct:DirectScheduler", + "core.lsf": "aiida.schedulers.plugins.lsf:LsfScheduler", + "core.pbspro": "aiida.schedulers.plugins.pbspro:PbsproScheduler", + "core.sge": "aiida.schedulers.plugins.sge:SgeScheduler", + "core.slurm": "aiida.schedulers.plugins.slurm:SlurmScheduler", + "core.torque": "aiida.schedulers.plugins.torque:TorqueScheduler" + }, + "aiida.storage": { + "core.psql_dos": "aiida.storage.psql_dos.backend:PsqlDosBackend", + "core.sqlite_temp": "aiida.storage.sqlite_temp.backend:SqliteTempBackend", + "core.sqlite_zip": "aiida.storage.sqlite_zip.backend:SqliteZipBackend" + }, + "aiida.tools.calculations": {}, + "aiida.tools.data.orbitals": { + "core.orbital": "aiida.tools.data.orbital.orbital:Orbital", + "core.realhydrogen": "aiida.tools.data.orbital.realhydrogen:RealhydrogenOrbital" + }, + "aiida.tools.dbexporters": {}, + "aiida.tools.dbimporters": { + "core.cod": "aiida.tools.dbimporters.plugins.cod:CodDbImporter", + "core.icsd": "aiida.tools.dbimporters.plugins.icsd:IcsdDbImporter", + "core.materialsproject": "aiida.tools.dbimporters.plugins.materialsproject:MaterialsProjectImporter", + "core.mpds": "aiida.tools.dbimporters.plugins.mpds:MpdsDbImporter", + "core.mpod": "aiida.tools.dbimporters.plugins.mpod:MpodDbImporter", + "core.nninc": "aiida.tools.dbimporters.plugins.nninc:NnincDbImporter", + "core.oqmd": "aiida.tools.dbimporters.plugins.oqmd:OqmdDbImporter", + "core.pcod": "aiida.tools.dbimporters.plugins.pcod:PcodDbImporter", + "core.tcod": "aiida.tools.dbimporters.plugins.tcod:TcodDbImporter" + }, + "aiida.transports": { + "core.local": "aiida.transports.plugins.local:LocalTransport", + "core.ssh": "aiida.transports.plugins.ssh:SshTransport" + }, + "aiida.workflows": { + "core.arithmetic.add_multiply": { + "description": [ + "Add two numbers and multiply it with a third." + ], + "spec": { + "inputs": [ + { + "name": "x", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "y", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "z", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." 
+ } + ] + }, + "class": "aiida.workflows.arithmetic.add_multiply:add_multiply" + }, + "core.arithmetic.multiply_add": { + "description": [ + "WorkChain to multiply two numbers and add a third, for testing and demonstration purposes." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "AbstractCode", + "info": "" + }, + { + "name": "x", + "required": true, + "valid_types": "Int", + "info": "" + }, + { + "name": "y", + "required": true, + "valid_types": "Int", + "info": "" + }, + { + "name": "z", + "required": true, + "valid_types": "Int", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "result", + "required": true, + "valid_types": "Int", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 400, + "message": "The result is a negative number." + } + ] + }, + "class": "aiida.workflows.arithmetic.multiply_add:MultiplyAddWorkChain" + } + }, + "console_scripts": { + "runaiida": "aiida.cmdline.commands.cmd_run:run", + "verdi": "aiida.cmdline.commands.cmd_verdi:verdi" + } + }, + "commits_count": 336, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 3 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 2 + }, + { + "colorclass": "red", + "text": "Data", + "count": 29 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 2 + }, + { + "colorclass": "purple", + "text": "Console scripts", + "count": 2 + }, + { + "colorclass": "orange", + "text": "Other (Calculations importers, Calculations monitors, Cmdline computer configure, ...)", + "count": 47 + } + ], + "pip_install_cmd": "pip install aiida-core", + "is_installable": "True" + }, + "aiida-cp2k": { + "code_home": "https://github.com/cp2k/aiida-cp2k", + "entry_point_prefix": "cp2k", + "pip_url": "aiida-cp2k", + "plugin_info": "https://raw.githubusercontent.com/cp2k/aiida-cp2k/master/setup.json", + "name": "aiida-cp2k", + "package_name": "aiida_cp2k", + "hosted_on": "github.com", + "metadata": { + "description": "The official AiiDA plugin for CP2K.", + "author": "The AiiDA team", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python :: 3" + ], + "version": "2.0.0" + }, + "aiida_version": ">=2.0.0,<3.0.0", + "entry_points": { + "aiida.calculations": { + "cp2k": { + "description": [ + "This is a Cp2kCalculation, subclass of JobCalculation, to prepare input for an ab-initio CP2K calculation.", + "", + " For information on CP2K, refer to: https://www.cp2k.org." + ], + "spec": { + "inputs": [ + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "The input parameters." + }, + { + "name": "basissets", + "required": false, + "valid_types": "", + "info": "A dictionary of basissets to be used in the calculations: key is the atomic symbol, value is either a single basisset or a list of basissets. 
If multiple basissets for a single symbol are passed, it is mandatory to specify a KIND section with a BASIS_SET keyword matching the names (or aliases) of the basissets." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "file", + "required": false, + "valid_types": "SinglefileData, StructureData", + "info": "Additional input files." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Input kpoint mesh." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parent_calc_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Working directory of a previously ran calculation to restart from." + }, + { + "name": "pseudos", + "required": false, + "valid_types": "", + "info": "A dictionary of pseudopotentials to be used in the calculations: key is the atomic symbol, value is either a single pseudopotential or a list of pseudopotentials. If multiple pseudos for a single symbol are passed, it is mandatory to specify a KIND section with a PSEUDOPOTENTIAL keyword matching the names (or aliases) of the pseudopotentials." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Optional input parameters." + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData, NoneType", + "info": "The main input structure." + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "The output dictionary containing results of the calculation." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "output_bands", + "required": false, + "valid_types": "BandsData", + "info": "Computed electronic band structure." + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "The relaxed output structure." 
+ }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 200, + "message": "The retrieved folder data node could not be accessed." + }, + { + "status": 210, + "message": "The retrieved folder did not contain the required output file." + }, + { + "status": 301, + "message": "The output file could not be read." + }, + { + "status": 302, + "message": "The output file could not be parsed." + }, + { + "status": 303, + "message": "The output file was incomplete." + }, + { + "status": 304, + "message": "The output file contains the word \"ABORT\"." + }, + { + "status": 312, + "message": "The output structure could not be parsed." + }, + { + "status": 350, + "message": "The parser raised an unexpected exception." + }, + { + "status": 400, + "message": "The calculation stopped prematurely because it ran out of walltime." + }, + { + "status": 500, + "message": "The ionic minimization cycle did not converge for the given thresholds." + }, + { + "status": 501, + "message": "The maximum number of optimization steps reached." + } + ] + }, + "class": "aiida_cp2k.calculations:Cp2kCalculation" + } + }, + "aiida.parsers": { + "cp2k_advanced_parser": "aiida_cp2k.parsers:Cp2kAdvancedParser", + "cp2k_base_parser": "aiida_cp2k.parsers:Cp2kBaseParser", + "cp2k_tools_parser": "aiida_cp2k.parsers:Cp2kToolsParser" + }, + "aiida.workflows": { + "cp2k.base": { + "description": [ + "Workchain to run a CP2K calculation with automated error handling and restarts." + ], + "spec": { + "inputs": [ + { + "name": "cp2k", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "The output dictionary containing results of the calculation." 
+ }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "final_input_parameters", + "required": false, + "valid_types": "Dict", + "info": "The input parameters used for the final calculation." + }, + { + "name": "output_bands", + "required": false, + "valid_types": "BandsData", + "info": "Computed electronic band structure." + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "The relaxed output structure." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 300, + "message": "The calculation failed with an unidentified unrecoverable error." + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 310, + "message": "The calculation failed with a known unrecoverable error." + }, + { + "status": 400, + "message": "The calculation didn't produce any data to restart from." + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." 
+ } + ] + }, + "class": "aiida_cp2k.workchains:Cp2kBaseWorkChain" + } + } + }, + "commits_count": 18, + "development_status": "stable", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 3 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-cp2k", + "is_installable": "True" + }, + "aiida-crystal-dft": { + "code_home": "https://github.com/tilde-lab/aiida-crystal-dft", + "development_status": "beta", + "documentation_url": "https://github.com/tilde-lab/aiida-crystal-dft", + "entry_point_prefix": "crystal_dft", + "pip_url": "git+https://github.com/tilde-lab/aiida-crystal-dft", + "name": "aiida-crystal-dft", + "package_name": "aiida_crystal_dft", + "hosted_on": "github.com", + "metadata": { + "description": "Yet another AiiDA plugin for CRYSTAL code, mainly intended for use with the cloud infrastructures\n(currently, MPDS)", + "classifiers": [ + "Development Status :: 4 - Beta", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Intended Audience :: Science/Research", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Topic :: Scientific/Engineering", + "Topic :: Scientific/Engineering :: Chemistry", + "Topic :: Scientific/Engineering :: Physics", + "Topic :: Scientific/Engineering :: Information Analysis" + ], + "author": "Andrey Sobolev, based on aiida-crystal17 plugin by Chris Sewell", + "author_email": "as@tilde.pro" + }, + "aiida_version": ">=2.0.2", + "entry_points": { + "aiida.data": { + "crystal_dft.basis": "aiida_crystal_dft.data.basis:CrystalBasisData", + "crystal_dft.basis_family": "aiida_crystal_dft.data.basis_family:CrystalBasisFamilyData" + }, + "aiida.calculations": { + "crystal_dft.serial": { + "description": [ + "No description available" + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "" + }, + { + "name": "basis", + "required": false, + "valid_types": "CrystalBasisData", + "info": "" + }, + { + "name": "basis_family", + "required": false, + "valid_types": "CrystalBasisFamilyData, NoneType", + "info": "" + }, + { + "name": "guess_oxistates", + "required": false, + "valid_types": "Bool, NoneType", + "info": "" + }, + { + "name": "high_spin_preferred", + "required": false, + "valid_types": "Bool, NoneType", + "info": "" + }, + { + "name": "is_magnetic", + "required": false, + "valid_types": "Bool, NoneType", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. 
The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "spinlock_steps", + "required": false, + "valid_types": "Int, NoneType", + "info": "" + }, + { + "name": "use_oxistates", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "oxidation_states", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "" + }, + { + "name": "output_trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "" + }, + { + "name": "output_wavefunction", + "required": false, + "valid_types": "SinglefileData", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." 
+ }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "SCF calculation not converged" + }, + { + "status": 301, + "message": "Geometry optimization failed" + }, + { + "status": 302, + "message": "Unit cell not neutral" + }, + { + "status": 303, + "message": "Basis set linearly dependent" + }, + { + "status": 304, + "message": "Neighbour list too large" + }, + { + "status": 305, + "message": "No G-vectors left" + }, + { + "status": 306, + "message": "Collapsed geometry" + }, + { + "status": 307, + "message": "Closed shell run - spin polarization not allowed" + }, + { + "status": 308, + "message": "Parameters for model hessian not defined" + }, + { + "status": 309, + "message": "Fermi energy not in interval" + }, + { + "status": 310, + "message": "Insufficient indices for Madelung sums" + }, + { + "status": 350, + "message": "Internal memory error" + }, + { + "status": 360, + "message": "Inadequate elastic calculation: additional optimization needed" + }, + { + "status": 400, + "message": "Unknown error" + }, + { + "status": 401, + "message": "The retrieved folder data node could not be accessed" + } + ] + }, + "class": "aiida_crystal_dft.calculations.serial:CrystalSerialCalculation" + }, + "crystal_dft.parallel": { + "description": [ + "No description available" + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "" + }, + { + "name": "basis", + "required": false, + "valid_types": "CrystalBasisData", + "info": "" + }, + { + "name": "basis_family", + "required": false, + "valid_types": "CrystalBasisFamilyData, NoneType", + "info": "" + }, + { + "name": "guess_oxistates", + "required": false, + "valid_types": "Bool, NoneType", + "info": "" + }, + { + "name": "high_spin_preferred", + "required": false, + "valid_types": "Bool, NoneType", + "info": "" + }, + { + "name": "is_magnetic", + "required": false, + "valid_types": "Bool, NoneType", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." 
+ }, + { + "name": "spinlock_steps", + "required": false, + "valid_types": "Int, NoneType", + "info": "" + }, + { + "name": "use_oxistates", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "oxidation_states", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "" + }, + { + "name": "output_trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "" + }, + { + "name": "output_wavefunction", + "required": false, + "valid_types": "SinglefileData", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "SCF calculation not converged" + }, + { + "status": 301, + "message": "Geometry optimization failed" + }, + { + "status": 302, + "message": "Unit cell not neutral" + }, + { + "status": 303, + "message": "Basis set linearly dependent" + }, + { + "status": 304, + "message": "Neighbour list too large" + }, + { + "status": 305, + "message": "No G-vectors left" + }, + { + "status": 306, + "message": "Collapsed geometry" + }, + { + "status": 307, + "message": "Closed shell run - spin polarization not allowed" + }, + { + "status": 308, + "message": "Parameters for model hessian not defined" + }, + { + "status": 309, + "message": "Fermi energy not in interval" + }, + { + "status": 310, + "message": "Insufficient indices for Madelung sums" + }, + { + "status": 350, + "message": "Internal memory error" + }, + { + "status": 360, + "message": "Inadequate elastic calculation: additional optimization needed" + }, + { + "status": 400, + "message": "Unknown error" + }, + { + "status": 401, + "message": "The retrieved folder data node could not be accessed" + } + ] + }, + "class": "aiida_crystal_dft.calculations.parallel:CrystalParallelCalculation" + }, + "crystal_dft.properties": { + "description": [ + "AiiDA calculation plugin wrapping the properties executable." 
+ ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "wavefunction", + "required": true, + "valid_types": "SinglefileData", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "output_bands", + "required": false, + "valid_types": "BandsData", + "info": "" + }, + { + "name": "output_bands_down", + "required": false, + "valid_types": "BandsData", + "info": "" + }, + { + "name": "output_dos", + "required": false, + "valid_types": "ArrayData", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The retrieved folder data node could not be accessed" + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." 
+ }, + { + "status": 150, + "message": "{message}" + } + ] + }, + "class": "aiida_crystal_dft.calculations.properties:PropertiesCalculation" + } + }, + "aiida.parsers": { + "crystal_dft": "aiida_crystal_dft.parsers.cry_pycrystal:CrystalParser", + "crystal_dft.properties": "aiida_crystal_dft.parsers.properties:PropertiesParser" + }, + "aiida.workflows": { + "crystal_dft.base": { + "description": [ + "Run CRYSTAL calculation" + ], + "spec": { + "inputs": [ + { + "name": "basis_family", + "required": true, + "valid_types": "CrystalBasisFamilyData", + "info": "" + }, + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "options", + "required": true, + "valid_types": "Dict", + "info": "Calculation options" + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool, NoneType", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "restart_params", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "" + }, + { + "name": "output_trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "" + }, + { + "name": "output_wavefunction", + "required": false, + "valid_types": "SinglefileData", + "info": "" + }, + { + "name": "oxidation_states", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "primitive_structure", + "required": false, + "valid_types": "StructureData", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." 
+ }, + { + "status": 300, + "message": "CRYSTAL error" + }, + { + "status": 400, + "message": "Unknown error" + } + ] + }, + "class": "aiida_crystal_dft.workflows.base:BaseCrystalWorkChain" + } + }, + "aiida.cmdline.data": { + "crystal_dft": "aiida_crystal_dft.cli.basis:basis_set" + } + }, + "commits_count": 19, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 3 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 2 + }, + { + "colorclass": "red", + "text": "Data", + "count": 2 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + }, + { + "colorclass": "orange", + "text": "Other (Data commands)", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/tilde-lab/aiida-crystal-dft", + "is_installable": "True" + }, + "aiida-crystal17": { + "code_home": "https://github.com/aiidaplugins/aiida-crystal17", + "development_status": "beta", + "documentation_url": "https://aiida-crystal17.readthedocs.io", + "entry_point_prefix": "crystal17", + "pip_url": "aiida-crystal17", + "plugin_info": "https://raw.githubusercontent.com/aiidaplugins/aiida-crystal17/master/setup.json", + "name": "aiida-crystal17", + "package_name": "aiida_crystal17", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin for running the CRYSTAL17 code", + "author": "Chris Sewell", + "author_email": "chrisj_sewell@hotmail.com", + "license": "MIT", + "home_page": "https://github.com/chrisjsewell/aiida-crystal17", + "classifiers": [ + "Framework :: AiiDA", + "Programming Language :: Python", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3.6", + "Topic :: Scientific/Engineering :: Chemistry", + "Topic :: Scientific/Engineering :: Physics" + ], + "version": "0.11.0" + }, + "aiida_version": ">=1.4.0,<2.0.0", + "entry_points": { + "aiida.calculations": { + "crystal17.basic": { + "description": [ + "AiiDA calculation plugin to run the crystal17 executable,", + " by supplying a normal .d12 input file and (optional) .gui file" + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "input_file", + "required": true, + "valid_types": "SinglefileData", + "info": "the input .d12 file content." + }, + { + "name": "input_external", + "required": false, + "valid_types": "SinglefileData", + "info": "optional input fort.34 (gui) file content (for use with EXTERNAL keyword)." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "results", + "required": true, + "valid_types": "Dict", + "info": "the data extracted from the main output file" + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." 
+ }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "the structure output from the calculation" + }, + { + "name": "symmetry", + "required": false, + "valid_types": "SymmetryData", + "info": "the symmetry data from the calculation" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 200, + "message": "The retrieved folder data node could not be accessed." + }, + { + "status": 210, + "message": "the main (stdout) output file was not found" + }, + { + "status": 211, + "message": "the temporary retrieved folder was not found" + }, + { + "status": 300, + "message": "An error was flagged trying to parse the crystal exec stdout file" + }, + { + "status": 301, + "message": "An error occurred parsing the 'opta'/'optc' geometry files" + }, + { + "status": 302, + "message": "The crystal exec stdout file denoted that the run was a testgeom" + }, + { + "status": 350, + "message": "the input file could not be read by CRYSTAL" + }, + { + "status": 351, + "message": "CRYSTAL could not find the required wavefunction file" + }, + { + "status": 352, + "message": "Possibly due to erroneous CHEMOD basis set modification" + }, + { + "status": 353, + "message": "Possibly due to erroneous CHEMOD basis set modification" + }, + { + "status": 354, + "message": "Error in CHEMOD basis set modification" + }, + { + "status": 400, + "message": "The calculation stopped prematurely because it ran out of walltime." + }, + { + "status": 401, + "message": "The calculation stopped prematurely because it ran out of memory." + }, + { + "status": 402, + "message": "The calculation stopped prematurely because it ran out of virtual memory." + }, + { + "status": 411, + "message": "SCF convergence did not finalise (usually due to reaching step limit)" + }, + { + "status": 412, + "message": "Geometry convergence did not finalise (usually due to reaching step limit)" + }, + { + "status": 413, + "message": "an error encountered usually during geometry optimisation" + }, + { + "status": 414, + "message": "an error was encountered during an SCF computation" + }, + { + "status": 415, + "message": "an unknown error was encountered, causing the MPI to abort" + }, + { + "status": 499, + "message": "The main crystal output file flagged an unhandled error" + }, + { + "status": 510, + "message": "inconsistency in the input and output symmetry" + }, + { + "status": 520, + "message": "primitive symmops were not found in the output file" + } + ] + }, + "class": "aiida_crystal17.calculations.cry_basic:CryBasicCalculation" + }, + "crystal17.doss": { + "description": [ + "AiiDA calculation plugin to run the ``properties`` executable,", + " for DOSS calculations." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "the input parameters to create the properties input file." 
+ }, + { + "name": "wf_folder", + "required": true, + "valid_types": "FolderData, RemoteData, SinglefileData", + "info": "the folder containing the wavefunction fort.9 file" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "results", + "required": true, + "valid_types": "Dict", + "info": "Summary Data extracted from the output file(s)" + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "arrays", + "required": false, + "valid_types": "ArrayData", + "info": "energies and DoS arrays" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 200, + "message": "The retrieved folder data node could not be accessed." + }, + { + "status": 210, + "message": "the main (stdout) output file was not found" + }, + { + "status": 211, + "message": "the temporary retrieved folder was not found" + }, + { + "status": 300, + "message": "An error was flagged trying to parse the crystal exec stdout file" + }, + { + "status": 350, + "message": "the input file could not be read by CRYSTAL" + }, + { + "status": 351, + "message": "CRYSTAL could not find the required wavefunction file" + }, + { + "status": 352, + "message": "Possibly due to erroneous CHEMOD basis set modification" + }, + { + "status": 352, + "message": "parser could not find the output isovalue (fort.25) file" + }, + { + "status": 353, + "message": "Possibly due to erroneous CHEMOD basis set modification" + }, + { + "status": 353, + "message": "error parsing output isovalue (fort.25) file" + }, + { + "status": 354, + "message": "Error in CHEMOD basis set modification" + }, + { + "status": 400, + "message": "The calculation stopped prematurely because it ran out of walltime." + }, + { + "status": 401, + "message": "The calculation stopped prematurely because it ran out of memory." + }, + { + "status": 402, + "message": "The calculation stopped prematurely because it ran out of virtual memory." 
+ }, + { + "status": 413, + "message": "an error encountered usually during geometry optimisation" + }, + { + "status": 414, + "message": "an error was encountered during an SCF computation" + }, + { + "status": 415, + "message": "an unknown error was encountered, causing the MPI to abort" + }, + { + "status": 499, + "message": "The main crystal output file flagged an unhandled error" + } + ] + }, + "class": "aiida_crystal17.calculations.prop_doss:CryDossCalculation" + }, + "crystal17.ech3": { + "description": [ + "AiiDA calculation plugin to run the ``properties`` executable, for 3D charge density (ECH3)." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "the input parameters to create the properties input file." + }, + { + "name": "wf_folder", + "required": true, + "valid_types": "FolderData, RemoteData, SinglefileData", + "info": "the folder containing the wavefunction fort.9 file" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "charge", + "required": true, + "valid_types": "GaussianCube", + "info": "The charge density cube" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "results", + "required": true, + "valid_types": "Dict", + "info": "Summary Data extracted from the output file(s)" + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "spin", + "required": false, + "valid_types": "GaussianCube", + "info": "The spin density cube" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 200, + "message": "The retrieved folder data node could not be accessed." 
+ }, + { + "status": 210, + "message": "the main (stdout) output file was not found" + }, + { + "status": 211, + "message": "the temporary retrieved folder was not found" + }, + { + "status": 300, + "message": "An error was flagged trying to parse the crystal exec stdout file" + }, + { + "status": 350, + "message": "the input file could not be read by CRYSTAL" + }, + { + "status": 351, + "message": "CRYSTAL could not find the required wavefunction file" + }, + { + "status": 352, + "message": "Possibly due to erroneous CHEMOD basis set modification" + }, + { + "status": 352, + "message": "parser could not find the output density file" + }, + { + "status": 353, + "message": "Possibly due to erroneous CHEMOD basis set modification" + }, + { + "status": 353, + "message": "error parsing output density file" + }, + { + "status": 354, + "message": "Error in CHEMOD basis set modification" + }, + { + "status": 400, + "message": "The calculation stopped prematurely because it ran out of walltime." + }, + { + "status": 401, + "message": "The calculation stopped prematurely because it ran out of memory." + }, + { + "status": 402, + "message": "The calculation stopped prematurely because it ran out of virtual memory." + }, + { + "status": 413, + "message": "an error encountered usually during geometry optimisation" + }, + { + "status": 414, + "message": "an error was encountered during an SCF computation" + }, + { + "status": 415, + "message": "an unknown error was encountered, causing the MPI to abort" + }, + { + "status": 499, + "message": "The main crystal output file flagged an unhandled error" + } + ] + }, + "class": "aiida_crystal17.calculations.prop_ech3:CryEch3Calculation" + }, + "crystal17.main": { + "description": [ + "AiiDA calculation plugin to run the crystal17 executable,", + " by supplying aiida nodes, with data sufficient to create the", + " .d12 input file and .gui file" + ], + "spec": { + "inputs": [ + { + "name": "basissets", + "required": true, + "valid_types": "BasisSetData", + "info": "Use a node for the basis set of one of the elements in the structure. You have to pass an additional parameter ('element') specifying the atomic element symbol for which you want to use this basis set." + }, + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "parameters", + "required": true, + "valid_types": "CryInputParamsData", + "info": "the input parameters to create the .d12 file content." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "structure used to construct the input fort.34 (gui) file" + }, + { + "name": "kinds", + "required": false, + "valid_types": "KindData", + "info": "additional structure kind specific data (e.g. initial spin)" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "symmetry", + "required": false, + "valid_types": "SymmetryData", + "info": "the symmetry of the structure, used to construct the input .gui file (fort.34)" + }, + { + "name": "wf_folder", + "required": false, + "valid_types": "RemoteData", + "info": "An optional working directory, of a previously completed calculation, containing a fort.9 wavefunction file to restart from" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." 
+ }, + { + "name": "results", + "required": true, + "valid_types": "Dict", + "info": "the data extracted from the main output file" + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "optimisation", + "required": false, + "valid_types": "TrajectoryData", + "info": "atomic configurations, for each optimisation step" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "the structure output from the calculation" + }, + { + "name": "symmetry", + "required": false, + "valid_types": "SymmetryData", + "info": "the symmetry data from the calculation" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 200, + "message": "The retrieved folder data node could not be accessed." + }, + { + "status": 210, + "message": "the main (stdout) output file was not found" + }, + { + "status": 211, + "message": "the temporary retrieved folder was not found" + }, + { + "status": 300, + "message": "An error was flagged trying to parse the crystal exec stdout file" + }, + { + "status": 301, + "message": "An error occurred parsing the 'opta'/'optc' geometry files" + }, + { + "status": 302, + "message": "The crystal exec stdout file denoted that the run was a testgeom" + }, + { + "status": 350, + "message": "the input file could not be read by CRYSTAL" + }, + { + "status": 351, + "message": "CRYSTAL could not find the required wavefunction file" + }, + { + "status": 352, + "message": "Possibly due to erroneous CHEMOD basis set modification" + }, + { + "status": 353, + "message": "Possibly due to erroneous CHEMOD basis set modification" + }, + { + "status": 354, + "message": "Error in CHEMOD basis set modification" + }, + { + "status": 400, + "message": "The calculation stopped prematurely because it ran out of walltime." + }, + { + "status": 401, + "message": "The calculation stopped prematurely because it ran out of memory." + }, + { + "status": 402, + "message": "The calculation stopped prematurely because it ran out of virtual memory." 
+ }, + { + "status": 411, + "message": "SCF convergence did not finalise (usually due to reaching step limit)" + }, + { + "status": 412, + "message": "Geometry convergence did not finalise (usually due to reaching step limit)" + }, + { + "status": 413, + "message": "an error encountered usually during geometry optimisation" + }, + { + "status": 414, + "message": "an error was encountered during an SCF computation" + }, + { + "status": 415, + "message": "an unknown error was encountered, causing the MPI to abort" + }, + { + "status": 499, + "message": "The main crystal output file flagged an unhandled error" + }, + { + "status": 510, + "message": "inconsistency in the input and output symmetry" + }, + { + "status": 520, + "message": "primitive symmops were not found in the output file" + } + ] + }, + "class": "aiida_crystal17.calculations.cry_main:CryMainCalculation" + }, + "crystal17.newk": { + "description": [ + "AiiDA calculation plugin to run the properties17 executable,", + " for NEWK calculations (to return the fermi energy)" + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "the input parameters to create the properties input file." + }, + { + "name": "wf_folder", + "required": true, + "valid_types": "FolderData, RemoteData, SinglefileData", + "info": "the folder containing the wavefunction fort.9 file" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "results", + "required": true, + "valid_types": "Dict", + "info": "Summary Data extracted from the output file(s)" + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 200, + "message": "The retrieved folder data node could not be accessed." 
+ }, + { + "status": 210, + "message": "the main (stdout) output file was not found" + }, + { + "status": 211, + "message": "the temporary retrieved folder was not found" + }, + { + "status": 300, + "message": "An error was flagged trying to parse the crystal exec stdout file" + }, + { + "status": 350, + "message": "the input file could not be read by CRYSTAL" + }, + { + "status": 351, + "message": "CRYSTAL could not find the required wavefunction file" + }, + { + "status": 352, + "message": "Possibly due to erroneous CHEMOD basis set modification" + }, + { + "status": 353, + "message": "Possibly due to erroneous CHEMOD basis set modification" + }, + { + "status": 354, + "message": "Error in CHEMOD basis set modification" + }, + { + "status": 400, + "message": "The calculation stopped prematurely because it ran out of walltime." + }, + { + "status": 401, + "message": "The calculation stopped prematurely because it ran out of memory." + }, + { + "status": 402, + "message": "The calculation stopped prematurely because it ran out of virtual memory." + }, + { + "status": 413, + "message": "an error encountered usually during geometry optimisation" + }, + { + "status": 414, + "message": "an error was encountered during an SCF computation" + }, + { + "status": 415, + "message": "an unknown error was encountered, causing the MPI to abort" + }, + { + "status": 499, + "message": "The main crystal output file flagged an unhandled error" + } + ] + }, + "class": "aiida_crystal17.calculations.prop_newk:CryNewkCalculation" + }, + "crystal17.ppan": { + "description": [ + "AiiDA calculation plugin to run the ``properties`` executable,", + " for PPAN (Mulliken population analysis) calculations." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "the input parameters to create the properties input file." + }, + { + "name": "wf_folder", + "required": true, + "valid_types": "FolderData, RemoteData, SinglefileData", + "info": "the folder containing the wavefunction fort.9 file" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "results", + "required": true, + "valid_types": "Dict", + "info": "Summary Data extracted from the output file(s)" + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." 
+ }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 200, + "message": "The retrieved folder data node could not be accessed." + }, + { + "status": 210, + "message": "the main (stdout) output file was not found" + }, + { + "status": 211, + "message": "the temporary retrieved folder was not found" + }, + { + "status": 300, + "message": "An error was flagged trying to parse the crystal exec stdout file" + }, + { + "status": 350, + "message": "the input file could not be read by CRYSTAL" + }, + { + "status": 351, + "message": "CRYSTAL could not find the required wavefunction file" + }, + { + "status": 352, + "message": "Possibly due to erroneous CHEMOD basis set modification" + }, + { + "status": 352, + "message": "parser could not find the output PPAN.dat file" + }, + { + "status": 353, + "message": "Possibly due to erroneous CHEMOD basis set modification" + }, + { + "status": 353, + "message": "error parsing output PPAN.dat file" + }, + { + "status": 354, + "message": "Error in CHEMOD basis set modification" + }, + { + "status": 400, + "message": "The calculation stopped prematurely because it ran out of walltime." + }, + { + "status": 401, + "message": "The calculation stopped prematurely because it ran out of memory." + }, + { + "status": 402, + "message": "The calculation stopped prematurely because it ran out of virtual memory." + }, + { + "status": 413, + "message": "an error encountered usually during geometry optimisation" + }, + { + "status": 414, + "message": "an error was encountered during an SCF computation" + }, + { + "status": 415, + "message": "an unknown error was encountered, causing the MPI to abort" + }, + { + "status": 499, + "message": "The main crystal output file flagged an unhandled error" + } + ] + }, + "class": "aiida_crystal17.calculations.prop_ppan:CryPpanCalculation" + } + }, + "aiida.cmdline.data": { + "crystal17.basis": "aiida_crystal17.cmndline.basis_set:basisset", + "crystal17.parse": "aiida_crystal17.cmndline.cmd_parser:parse", + "crystal17.symmetry": "aiida_crystal17.cmndline.symmetry:symmetry" + }, + "aiida.data": { + "crystal17.basisset": "aiida_crystal17.data.basis_set:BasisSetData", + "crystal17.gcube": "aiida_crystal17.data.gcube:GaussianCube", + "crystal17.kinds": "aiida_crystal17.data.kinds:KindData", + "crystal17.parameters": "aiida_crystal17.data.input_params:CryInputParamsData", + "crystal17.symmetry": "aiida_crystal17.data.symmetry:SymmetryData" + }, + "aiida.groups": { + "crystal17.basisset": "aiida_crystal17.data.basis_set:BasisSetFamily" + }, + "aiida.parsers": { + "crystal17.doss": "aiida_crystal17.parsers.cry_doss:CryDossParser", + "crystal17.ech3": "aiida_crystal17.parsers.cry_ech3:CryEch3Parser", + "crystal17.main": "aiida_crystal17.parsers.cry_main:CryMainParser", + "crystal17.newk": "aiida_crystal17.parsers.cry_newk:CryNewkParser", + "crystal17.ppan": "aiida_crystal17.parsers.cry_ppan:CryPpanParser" + }, + "aiida.workflows": { + "crystal17.main.base": { + "description": [ + "Workchain to run a standard CRYSTAL17 calculation,", + " with automated error handling and restarts." + ], + "spec": { + "inputs": [ + { + "name": "cry", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "basis_family", + "required": false, + "valid_types": "Str", + "info": "An alternative to specifying the basis sets manually: one can specify the name of an existing basis set family and the work chain will generate the basis sets automatically based on the input structure." 
+ }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict", + "info": "Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated." + }, + { + "name": "kpoints_distance", + "required": false, + "valid_types": "Float", + "info": "The minimum desired distance in 1/\u212b between k-points in reciprocal space. The explicit k-points will be generated automatically by the input structure, and will replace the SHRINK IS value in the input parameters.Note: This methods assumes the PRIMITIVE unit cell is provided" + }, + { + "name": "kpoints_force_parity", + "required": false, + "valid_types": "Bool", + "info": "Optional input when constructing the k-points based on a desired `kpoints_distance`. Setting this to `True` will force the k-point mesh to have an even number of points along each lattice vector except for any non-periodic directions." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "results", + "required": true, + "valid_types": "Dict", + "info": "the data extracted from the main output file" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "the structure output from the calculation" + }, + { + "name": "symmetry", + "required": false, + "valid_types": "SymmetryData", + "info": "the symmetry data from the calculation" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 201, + "message": "The parameters could not be validated against the jsonschema." + }, + { + "status": 202, + "message": "The explicit `basis_sets` or `basis_family` could not be used to get the necessary basis sets." + }, + { + "status": 204, + "message": "The `metadata.options` did not specify both `resources.num_machines` and `max_wallclock_seconds`." + }, + { + "status": 300, + "message": "The calculation failed with an unrecoverable error." + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 320, + "message": "The initialization calculation failed." + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." 
+ }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." + } + ] + }, + "class": "aiida_crystal17.workflows.crystal_main.base:CryMainBaseWorkChain" + }, + "crystal17.properties": { + "description": [ + "A WorkChain to compute properties of a structure, using CRYSTAL.", + "", + " Either a pre-computed wavefunction (fort.9) file,", + " or inputs for a CryMainCalculation, should be supplied.", + " Inputs for property calculations can then be added", + " (currently available; doss, ech3)." + ], + "spec": { + "inputs": [ + { + "name": "check_remote", + "required": false, + "valid_types": "Bool", + "info": "If a RemoteData wf_folder is input, check it contains the wavefunction file, before launching calculations. Note, this will fail if the remote computer is not immediately available" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation will be cleaned at the end of execution." + }, + { + "name": "doss", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "ech3", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "ppan", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "scf", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "test_run", + "required": false, + "valid_types": "Bool", + "info": "break off the workchain before submitting a calculation" + }, + { + "name": "wf_folder", + "required": false, + "valid_types": "FolderData, RemoteData, SinglefileData", + "info": "the folder containing the wavefunction fort.9 file" + } + ], + "outputs": [ + { + "name": "doss", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "ech3", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "ppan", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "scf", + "required": false, + "valid_types": "", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 200, + "message": "Workchain ended before submitting calculation." + }, + { + "status": 201, + "message": "Neither a wf_folder nor scf calculation was supplied." + }, + { + "status": 202, + "message": "No property calculation inputs were supplied." + }, + { + "status": 203, + "message": "The supplied folder does contain the wavefunction file." + }, + { + "status": 210, + "message": "The SCF calculation submission failed." + }, + { + "status": 301, + "message": "The SCF calculation failed." + }, + { + "status": 302, + "message": "One or more property calculations failed." 
+ } + ] + }, + "class": "aiida_crystal17.workflows.crystal_props.base:CryPropertiesWorkChain" + }, + "crystal17.sym3d": { + "description": [ + "modify an AiiDa structure instance and compute its symmetry", + "", + " Inequivalent atomic sites are dictated by atom kinds" + ], + "spec": { + "inputs": [ + { + "name": "settings", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "cif", + "required": false, + "valid_types": "CifData", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "" + } + ], + "outputs": [ + { + "name": "symmetry", + "required": true, + "valid_types": "SymmetryData", + "info": "" + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 300, + "message": "one of either a structure or cif input must be supplied" + }, + { + "status": 301, + "message": "the supplied structure must be 3D (i.e. have all dimensions pbc=True)\"" + }, + { + "status": 302, + "message": "idealize can only be used when standardize=True" + }, + { + "status": 303, + "message": "the kind names supplied are not compatible with the structure" + }, + { + "status": 304, + "message": "error creating new structure" + }, + { + "status": 305, + "message": "error computing symmetry operations" + } + ] + }, + "class": "aiida_crystal17.workflows.symmetrise_3d_struct:Symmetrise3DStructure" + } + }, + "console_scripts": { + "mock_crystal17": "aiida_crystal17.tests.mock_crystal17:main", + "mock_properties17": "aiida_crystal17.tests.mock_properties17:main" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 6 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 5 + }, + { + "colorclass": "red", + "text": "Data", + "count": 5 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 3 + }, + { + "colorclass": "purple", + "text": "Console scripts", + "count": 2 + }, + { + "colorclass": "orange", + "text": "Other (Data commands, Groups)", + "count": 4 + } + ], + "pip_install_cmd": "pip install aiida-crystal17", + "is_installable": "True" + }, + "aiida-cusp": { + "code_home": "https://github.com/aiida-cusp/aiida-cusp", + "documentation_url": "https://aiida-cusp.readthedocs.io", + "entry_point_prefix": "cusp", + "pip_url": "https://pypi.org/project/aiida-cusp", + "name": "aiida-cusp", + "package_name": "aiida_cusp", + "hosted_on": "github.com", + "metadata": { + "author": "Andreas Stamminger", + "author_email": "stammingera@gmail.com", + "version": "0.1.0b2", + "description": "Custodian based VASP Plugin for AiiDA", + "classifiers": [ + "Development Status :: 4 - Beta", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Topic :: Scientific/Engineering :: Physics", + "Topic :: Scientific/Engineering :: Chemistry", + "Environment :: Plugins", + "Framework :: AiiDA" + ] + }, + "aiida_version": ">=1.3.0,<2.0.0", + "entry_points": { + 
"aiida.data": { + "cusp.kpoints": "aiida_cusp.data.inputs.vasp_kpoint:VaspKpointData", + "cusp.poscar": "aiida_cusp.data.inputs.vasp_poscar:VaspPoscarData", + "cusp.incar": "aiida_cusp.data.inputs.vasp_incar:VaspIncarData", + "cusp.potcar": "aiida_cusp.data.inputs.vasp_potcar:VaspPotcarData", + "cusp.vasprun": "aiida_cusp.data.outputs.vasp_vasprun:VaspVasprunData", + "cusp.outcar": "aiida_cusp.data.outputs.vasp_outcar:VaspOutcarData", + "cusp.contcar": "aiida_cusp.data.outputs.vasp_contcar:VaspContcarData", + "cusp.chgcar": "aiida_cusp.data.outputs.vasp_chgcar:VaspChgcarData", + "cusp.wavecar": "aiida_cusp.data.outputs.vasp_wavecar:VaspWavecarData", + "cusp.generic": "aiida_cusp.data.outputs.vasp_generic:VaspGenericData", + "cusp.potcarfile": "aiida_cusp.data.inputs.vasp_potcar:VaspPotcarFile" + }, + "aiida.calculations": { + "cusp.vasp": "aiida_cusp.calculators.vasp_calculation:VaspCalculation" + }, + "aiida.parsers": { + "cusp.default": "aiida_cusp.parsers.vasp_file_parser:VaspFileParser" + }, + "aiida.cmdline.data": { + "potcar": "aiida_cusp.cli.potcar_cmd:potcar" + } + }, + "commits_count": 68, + "development_status": "beta", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "red", + "text": "Data", + "count": 11 + }, + { + "colorclass": "orange", + "text": "Other (Data commands)", + "count": 1 + } + ], + "pip_install_cmd": "pip install https://pypi.org/project/aiida-cusp", + "is_installable": "False" + }, + "aiida-dataframe": { + "entry_point_prefix": "dataframe", + "plugin_info": "https://raw.github.com/janssenhenning/aiida-dataframe/main/pyproject.toml", + "code_home": "https://github.com/janssenhenning/aiida-dataframe", + "version_file": "https://raw.githubusercontent.com/janssenhenning/aiida-dataframe/main/aiida_dataframe/__init__.py", + "pip_url": "aiida-dataframe", + "documentation_url": "https://aiida-dataframe.readthedocs.io/en/latest/", + "name": "aiida-dataframe", + "package_name": "aiida_dataframe", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA data plugin for pandas DataFrame objects", + "author_email": "Henning Jan\u00dfen ", + "classifiers": [ + "Development Status :: 4 - Beta", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9" + ], + "version": "0.1.3" + }, + "aiida_version": ">=1.0,<3", + "entry_points": { + "aiida.cmdline.data": { + "dataframe": "aiida_dataframe.cli:data_cli" + }, + "aiida.data": { + "dataframe.frame": "aiida_dataframe.data.dataframe:PandasFrameData" + } + }, + "commits_count": 13, + "development_status": "beta", + "summaryinfo": [ + { + "colorclass": "red", + "text": "Data", + "count": 1 + }, + { + "colorclass": "orange", + "text": "Other (Data commands)", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-dataframe", + "is_installable": "True" + }, + "aiida-ddec": { + "code_home": "https://github.com/lsmo-epfl/aiida-ddec", + "entry_point_prefix": "ddec", + "pip_url": "git+https://github.com/yakutovicha/aiida-ddec", + "name": "aiida-ddec", + "package_name": "aiida_ddec", + "hosted_on": "github.com", + 
"metadata": { + "author": "Aliaksandr Yakutovich", + "author_email": "aliaksandr.yakutovich@epfl.ch", + "version": "1.1.0", + "description": "AiiDA plugin for DDEC code", + "classifiers": [ + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Framework :: AiiDA", + "Development Status :: 5 - Production/Stable" + ] + }, + "aiida_version": ">=1.1.0,<3", + "entry_points": { + "aiida.calculations": { + "ddec": { + "description": [ + "AiiDA plugin for the ddec code that performs density derived", + " electrostatic and chemical atomic population analysis." + ], + "spec": { + "inputs": [ + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "Input parameters such as net charge, protocol, atomic densities path, ..." + }, + { + "name": "charge_density_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Use a remote folder (for restarts and similar)" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "structure_ddec", + "required": true, + "valid_types": "CifData", + "info": "structure with DDEC charges" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The retrieved folder data node could not be accessed." 
+ }, + { + "status": 101, + "message": "The retrieved folder does not contain an output file." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + } + ] + }, + "class": "aiida_ddec.calculations:DdecCalculation" + } + }, + "aiida.parsers": { + "ddec": "aiida_ddec.parsers:DdecParser" + }, + "aiida.workflows": { + "ddec.cp2k_ddec": "aiida_ddec.workchains:Cp2kDdecWorkChain" + } + }, + "commits_count": 9, + "development_status": "stable", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/yakutovicha/aiida-ddec", + "is_installable": "True" + }, + "aiida-defects": { + "code_home": "https://github.com/epfl-theos/aiida-defects", + "entry_point_prefix": "defects", + "pip_url": "aiida-defects", + "plugin_info": "https://raw.githubusercontent.com/epfl-theos/aiida-defects/master/pyproject.toml", + "name": "aiida-defects", + "package_name": "aiida_defects", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA-Defects is a plugin for the AiiDA computational materials science framework, and provides tools and automated workflows for the study of defects in materials.", + "author": "The AiiDA-Defects developers", + "classifiers": [ + "Development Status :: 4 - Beta", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python" + ], + "version": "1.0.1" + }, + "aiida_version": ">=2.0,<3", + "entry_points": { + "aiida.data": { + "defects.array.stability": "aiida_defects.data.data:StabilityData" + }, + "aiida.workflows": { + "defects.formation_energy.chemical_potential": "aiida_defects.formation_energy.chemical_potential.chemical_potential:ChemicalPotentialWorkchain", + "defects.formation_energy.corrections.gaussian_countercharge": "aiida_defects.formation_energy.corrections.gaussian_countercharge.gaussian_countercharge:GaussianCounterChargeWorkchain", + "defects.formation_energy.corrections.gaussian_countercharge.model_potential": "aiida_defects.formation_energy.corrections.gaussian_countercharge.model_potential.model_potential:ModelPotentialWorkchain", + "defects.formation_energy.corrections.point_countercharge": "aiida_defects.formation_energy.corrections.point_countercharge.point_countercharge:PointCounterChargeWorkchain", + "defects.formation_energy.potential_alignment": "aiida_defects.formation_energy.potential_alignment.potential_alignment:PotentialAlignmentWorkchain", + "defects.formation_energy.qe": "aiida_defects.formation_energy.formation_energy_qe:FormationEnergyWorkchainQE", + "defects.formation_energy.siesta": "aiida_defects.formation_energy.formation_energy_siesta:FormatonEnergyWorkchainSiesta" + } + }, + "commits_count": 10, + "development_status": "beta", + "summaryinfo": [ + { + "colorclass": "red", + "text": "Data", + "count": 1 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 7 + } + ], + "pip_install_cmd": "pip install aiida-defects", + "is_installable": "True" + }, + "aiida-diff": { + "code_home": 
"https://github.com/aiidateam/aiida-diff", + "development_status": "stable", + "documentation_url": "https://aiida-diff.readthedocs.io/", + "entry_point_prefix": "diff", + "pip_url": "git+https://github.com/aiidateam/aiida-diff#egg=aiida-diff-0.1.0a0", + "name": "aiida-diff", + "package_name": "aiida_diff", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA demo plugin that wraps the `diff` executable for computing the difference between two files.", + "classifiers": [ + "Programming Language :: Python", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Development Status :: 3 - Alpha", + "Framework :: AiiDA" + ], + "author": "The AiiDA Team" + }, + "aiida_version": ">=2.0,<3", + "entry_points": { + "aiida.data": { + "diff": "aiida_diff.data:DiffParameters" + }, + "aiida.calculations": { + "diff": "aiida_diff.calculations:DiffCalculation" + }, + "aiida.parsers": { + "diff": "aiida_diff.parsers:DiffParser" + }, + "aiida.cmdline.data": { + "diff": "aiida_diff.cli:data_cli" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "red", + "text": "Data", + "count": 1 + }, + { + "colorclass": "orange", + "text": "Other (Data commands)", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/aiidateam/aiida-diff#egg=aiida-diff-0.1.0a0", + "is_installable": "False" + }, + "aiida-donothing": { + "code_home": "https://github.com/atztogo/aiida-donothing", + "entry_point_prefix": "donothing", + "name": "aiida-donothing", + "package_name": "aiida_donothing", + "hosted_on": "github.com", + "metadata": { + "author": "Atsushi Togo", + "author_email": "atz.togo@gmail.com", + "version": "0.1", + "description": "AiiDA calculation plugin for doing nothing", + "classifiers": [ + "Programming Language :: Python", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Framework :: AiiDA" + ] + }, + "aiida_version": ">=1.6.5,<2.0.0", + "entry_points": { + "aiida.calculations": { + "donothing.donothing": "aiida_donothing.calculations.donothing:DoNothingCalculation" + }, + "aiida.parsers": { + "donothing.donothing": "aiida_donothing.parsers.donothing:DoNothingParser" + } + }, + "commits_count": 1, + "development_status": "planning", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + } + ], + "pip_install_cmd": "See source code repository." + }, + "aiida-dynamic-workflows": { + "code_home": "https://github.com/microsoft/aiida-dynamic-workflows", + "entry_point_prefix": "dynamic_workflows", + "name": "aiida-dynamic-workflows", + "package_name": "aiida_dynamic_workflows", + "hosted_on": "github.com", + "metadata": {}, + "aiida_version": null, + "entry_points": {}, + "commits_count": 0, + "development_status": "planning", + "summaryinfo": [], + "pip_install_cmd": "See source code repository." 
+ }, + "aiida-environ": { + "code_home": "https://github.com/environ-developers/aiida-environ", + "entry_point_prefix": "environ", + "pip_url": "git+https://github.com/environ-developers/aiida-environ", + "name": "aiida-environ", + "package_name": "aiida_environ", + "hosted_on": "github.com", + "metadata": {}, + "aiida_version": null, + "entry_points": {}, + "commits_count": 0, + "development_status": "planning", + "summaryinfo": [], + "pip_install_cmd": "pip install git+https://github.com/environ-developers/aiida-environ" + }, + "aiida-eon": { + "code_home": "https://github.com/HaoZeke/aiida-eon", + "entry_point_prefix": "eon", + "name": "aiida-eon", + "package_name": "aiida_eon", + "hosted_on": "github.com", + "metadata": {}, + "aiida_version": null, + "entry_points": {}, + "commits_count": 0, + "development_status": "planning", + "summaryinfo": [], + "pip_install_cmd": "See source code repository." + }, + "aiida-eonclient": { + "code_home": "https://github.com/HaoZeke/aiida-eonclient", + "entry_point_prefix": "eonclient", + "name": "aiida-eonclient", + "package_name": "aiida_eonclient", + "hosted_on": "github.com", + "metadata": {}, + "aiida_version": null, + "entry_points": {}, + "commits_count": 0, + "development_status": "planning", + "summaryinfo": [], + "pip_install_cmd": "See source code repository." + }, + "aiida-fenics": { + "code_home": "https://github.com/sphuber/aiida-fenics/tree/master", + "entry_point_prefix": "fenics", + "pip_url": "git+https://github.com/sphuber/aiida-fenics", + "name": "aiida-fenics", + "package_name": "aiida_fenics", + "hosted_on": "github.com", + "metadata": {}, + "aiida_version": null, + "entry_points": {}, + "commits_count": -1, + "development_status": "planning", + "summaryinfo": [], + "pip_install_cmd": "pip install git+https://github.com/sphuber/aiida-fenics" + }, + "aiida-firecrest": { + "code_home": "https://github.com/aiidateam/aiida-firecrest", + "entry_point_prefix": "firecrest", + "pip_url": "aiida-firecrest", + "plugin_info": "https://raw.githubusercontent.com/aiidateam/aiida-firecrest/main/pyproject.toml", + "name": "aiida-firecrest", + "package_name": "aiida_firecrest", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA Transport/Scheduler plugins for interfacing with FirecREST.", + "author_email": "Chris Sewell ", + "classifiers": [ + "Development Status :: 3 - Alpha", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: Implementation :: CPython", + "Topic :: Software Development :: Libraries :: Python Modules" + ], + "version": "0.1.0a1" + }, + "aiida_version": "<2", + "entry_points": { + "aiida.schedulers": { + "firecrest": "aiida_firecrest.scheduler:FirecrestScheduler" + }, + "aiida.transports": { + "firecrest": "aiida_firecrest.transport:FirecrestTransport" + }, + "console_scripts": { + "aiida-firecrest-cli": "aiida_firecrest.cli:main" + } + }, + "commits_count": 19, + "development_status": "alpha", + "summaryinfo": [ + { + "colorclass": "purple", + "text": "Console scripts", + "count": 1 + }, + { + "colorclass": "orange", + "text": "Other (Schedulers, Transports)", + "count": 2 + } + ], + "pip_install_cmd": "pip install --pre aiida-firecrest", + "is_installable": "True" + }, + "aiida-fireworks-scheduler": { + 
"code_home": "https://github.com/zhubonan/aiida-fireworks-scheduler", + "development_status": "beta", + "documentation_url": "https://aiida-fireworks-scheduler.readthedocs.io", + "entry_point_prefix": "fireworks_scheduler", + "pip_url": "git+https://github.com/zhubonan/aiida-fireworks-scheduler", + "name": "aiida-fireworks-scheduler", + "package_name": "aiida_fireworks_scheduler", + "hosted_on": "github.com", + "metadata": { + "author": "Bonan Zhu", + "author_email": "zhubonan@outlook.com", + "version": "1.2.0", + "description": "AiiDA plugin to allow using `fireworks` as the execution engine for `CalcJob`.", + "classifiers": [ + "Programming Language :: Python", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Framework :: AiiDA" + ] + }, + "aiida_version": null, + "entry_points": { + "aiida.schedulers": { + "fireworks": "aiida_fireworks_scheduler.fwscheduler:FwScheduler", + "fireworks_scheduler.default": "aiida_fireworks_scheduler.fwscheduler:FwScheduler", + "fireworks_scheduler.keepenv": "aiida_fireworks_scheduler.fwscheduler:FwSchedulerKeepEnv" + }, + "aiida.cmdline.data": { + "fireworks-scheduler": "aiida_fireworks_scheduler.cmdline:fw_cli" + }, + "console_scripts": { + "arlaunch": "aiida_fireworks_scheduler.scripts.arlaunch_run:arlaunch" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "purple", + "text": "Console scripts", + "count": 1 + }, + { + "colorclass": "orange", + "text": "Other (Data commands, Schedulers)", + "count": 4 + } + ], + "pip_install_cmd": "pip install git+https://github.com/zhubonan/aiida-fireworks-scheduler", + "is_installable": "True" + }, + "aiida-fleur": { + "code_home": "https://github.com/JuDFTteam/aiida-fleur/tree/develop", + "development_status": "stable", + "documentation_url": "https://aiida-fleur.readthedocs.io/", + "entry_point_prefix": "fleur", + "pip_url": "aiida-fleur", + "plugin_info": "https://raw.github.com/JuDFTteam/aiida-fleur/develop/setup.json", + "name": "aiida-fleur", + "package_name": "aiida_fleur", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA Plugin for running the FLEUR code and its input generator. Also includes high-level workchains and utilities", + "author_email": "The JuDFT team ", + "classifiers": [ + "Environment :: Plugins", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering :: Physics" + ], + "version": "2.0.0" + }, + "aiida_version": ">=2.0.1,<3.0.0", + "entry_points": { + "aiida.calculations": { + "fleur.fleur": { + "description": [ + "A CalcJob class that represents FLEUR DFT calculation.", + " For more information about the FLEUR-code family go to http://www.flapw.de/" + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." 
+ }, + { + "name": "fleurinp", + "required": false, + "valid_types": "FleurinpData, NoneType", + "info": "Use a FleurinpData node that specifies the input parameters, usually copy from the parent calculation, basically makes the inp.xml file visible in the db and makes sure it has the files needed." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parent_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Use a remote or local repository folder as parent folder (also for restarts and similar). It should contain all the needed files for a Fleur calc, only edited files should be uploaded from the repository." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "This parameter data node is used to specify for some advanced features how the plugin behaves. You can add files to the retrieve list, or add command line switches, for all available features here check the documentation." + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "error_params", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "output_parameters", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "output_params_complex", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "relax_parameters", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid."
+ }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "No retrieved folder found." + }, + { + "status": 301, + "message": "One of the output files can not be opened." + }, + { + "status": 302, + "message": "FLEUR calculation failed for unknown reason." + }, + { + "status": 303, + "message": "XML output file was not found." + }, + { + "status": 304, + "message": "Parsing of XML output file failed." + }, + { + "status": 305, + "message": "Parsing of relax XML output file failed." + }, + { + "status": 310, + "message": "FLEUR calculation failed due to lack of memory." + }, + { + "status": 311, + "message": "FLEUR calculation failed because an atom spilled to the vacuum during relaxation" + }, + { + "status": 312, + "message": "FLEUR calculation failed due to MT overlap." + }, + { + "status": 313, + "message": "Overlapping MT-spheres during relaxation." + }, + { + "status": 314, + "message": "Problem with cdn is suspected. Consider removing cdn" + }, + { + "status": 315, + "message": "The LDA+U density matrix contains invalid elements." + }, + { + "status": 316, + "message": "Calculation failed due to time limits." + }, + { + "status": 318, + "message": "Calculation failed due to missing dependency ({name}) for given calculation." + } + ] + }, + "class": "aiida_fleur.calculation.fleur:FleurCalculation" + }, + "fleur.inpgen": { + "description": [ + "JobCalculationClass for the inpgen, which is a preprocessor for a FLEUR calculation.", + " For more information about produced files and the FLEUR-code family, go to http://www.flapw.de/." + ], + "spec": { + "inputs": [ + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "Choose the input structure to use" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Use a node that specifies the input parameters for the namelists" + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "This parameter data node is used to specify for some advanced features how the plugin behaves. You can add files to the retrieve list, or add command line switches, for all available features here check the documentation."
+ } + ], + "outputs": [ + { + "name": "fleurinp", + "required": true, + "valid_types": "FleurinpData", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "No retrieved folder found." + }, + { + "status": 301, + "message": "One of the output files can not be opened." + }, + { + "status": 306, + "message": "XML input file was not found." + }, + { + "status": 307, + "message": "Some required files were not retrieved." + }, + { + "status": 308, + "message": "During parsing: FleurinpData could not be initialized, see log. " + }, + { + "status": 309, + "message": "During parsing: FleurinpData failed validation." 
+ }, + { + "status": 310, + "message": "The profile {profile} is not known to the used inpgen code" + } + ] + }, + "class": "aiida_fleur.calculation.fleurinputgen:FleurinputgenCalculation" + } + }, + "aiida.data": { + "fleur.fleurinp": "aiida_fleur.data.fleurinp:FleurinpData" + }, + "aiida.parsers": { + "fleur.fleurinpgenparser": "aiida_fleur.parsers.fleur_inputgen:Fleur_inputgenParser", + "fleur.fleurparser": "aiida_fleur.parsers.fleur:FleurParser" + }, + "aiida.workflows": { + "fleur.banddos": { + "description": [ + "This workflow calculated a bandstructure from a Fleur calculation", + "", + " :Params: a Fleurcalculation node", + " :returns: Success, last result node, list with convergence behavior" + ], + "spec": { + "inputs": [ + { + "name": "fleur", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "fleurinp", + "required": false, + "valid_types": "FleurinpData, NoneType", + "info": "" + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "options", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "remote", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "" + }, + { + "name": "scf", + "required": false, + "valid_types": "Data", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "banddos_calc", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "output_banddos_wc_para", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "output_banddos_wc_bands", + "required": false, + "valid_types": "BandsData", + "info": "" + }, + { + "name": "output_banddos_wc_dos", + "required": false, + "valid_types": "XyData", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 230, + "message": "Invalid workchain parameters." + }, + { + "status": 231, + "message": "Invalid input configuration." + }, + { + "status": 233, + "message": "Invalid code node specified, check inpgen and fleur code nodes." + }, + { + "status": 235, + "message": "Input file modification failed." + }, + { + "status": 236, + "message": "Input file was corrupted after user's modifications." + }, + { + "status": 334, + "message": "SCF calculation failed." + }, + { + "status": 335, + "message": "Found no SCF calculation remote repository." + } + ] + }, + "class": "aiida_fleur.workflows.banddos:FleurBandDosWorkChain" + }, + "fleur.base": { + "description": [ + "Workchain to run a FLEUR calculation with automated error handling and restarts" + ], + "spec": { + "inputs": [ + { + "name": "options", + "required": true, + "valid_types": "Dict", + "info": "Optional parameters to set up computational details." 
+ }, + { + "name": "add_comp_para", + "required": false, + "valid_types": "Dict", + "info": "Gives additional control over computational parametersonly_even_MPI: set to true if you want to suppress odd number of MPI processes in parallelisation.This might speedup a calculation for machines having even number of sockets per node.max_queue_nodes: maximal number of nodes allowed on the remote machine. Used only to automatically solve some FLEUR failures.max_queue_wallclock_sec: maximal wallclock time allowed on the remote machine. Used only to automatically solve some FLEUR failures." + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "description", + "required": false, + "valid_types": "str, NoneType", + "info": "Calculation description." + }, + { + "name": "fleurinp", + "required": false, + "valid_types": "FleurinpData, NoneType", + "info": "Use a FleurinpData node that specifies the input parametersusually copy from the parent calculation, basically makesthe inp.xml file visible in the db and makes sure it has the files needed." + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration." + }, + { + "name": "label", + "required": false, + "valid_types": "str, NoneType", + "info": "Calculation label." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parent_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Use a remote or local repository folder as parent folder (also for restarts and similar). It should contain all the needed files for a Fleur calc, only edited files should be uploaded from the repository." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "This parameter data node is used to specify for some advanced features how the plugin behaves. 
You can add filesthe retrieve list, or add command line switches, for all available features here check the documentation." + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "error_params", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "output_parameters", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "output_params_complex", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "relax_parameters", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 311, + "message": "FLEUR calculation failed because an atom spilled to thevacuum during relaxation" + }, + { + "status": 313, + "message": "Overlapping MT-spheres during relaxation." + }, + { + "status": 388, + "message": "Computational resources are not optimal." + }, + { + "status": 389, + "message": "Computational resources are not optimal." + }, + { + "status": 390, + "message": "Computational resources are not optimal." + }, + { + "status": 399, + "message": "FleurCalculation failed and FleurBaseWorkChain has no strategy to resolve this" + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." + } + ] + }, + "class": "aiida_fleur.workflows.base_fleur:FleurBaseWorkChain" + }, + "fleur.base_relax": { + "description": [ + "Workchain to run Relax WorkChain with automated error handling and restarts" + ], + "spec": { + "inputs": [ + { + "name": "scf", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "description", + "required": false, + "valid_types": "str, NoneType", + "info": "Calculation description." + }, + { + "name": "final_scf", + "required": false, + "valid_types": "Data", + "info": "" + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration." 
+ }, + { + "name": "label", + "required": false, + "valid_types": "str, NoneType", + "info": "Calculation label." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "last_scf", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "optimized_structure", + "required": true, + "valid_types": "StructureData", + "info": "" + }, + { + "name": "output_relax_wc_para", + "required": true, + "valid_types": "Dict", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 399, + "message": "FleurRelaxWorkChain failed and FleurBaseRelaxWorkChain has no strategy to resolve this" + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." + } + ] + }, + "class": "aiida_fleur.workflows.base_relax:FleurBaseRelaxWorkChain" + }, + "fleur.cfcoeff": { + "description": [ + "Workflow for calculating rare-earth crystal field coefficients" + ], + "spec": { + "inputs": [ + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "orbcontrol", + "required": false, + "valid_types": "Data", + "info": "" + }, + { + "name": "scf", + "required": false, + "valid_types": "Data", + "info": "" + }, + { + "name": "scf_rare_earth_analogue", + "required": false, + "valid_types": "Data", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "output_cfcoeff_wc_para", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "output_cfcoeff_wc_charge_densities", + "required": false, + "valid_types": "XyData", + "info": "" + }, + { + "name": "output_cfcoeff_wc_potentials", + "required": false, + "valid_types": "XyData", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 230, + "message": "Invalid workchain parameters." + }, + { + "status": 231, + "message": "Invalid input configuration." + }, + { + "status": 235, + "message": "Input file modification failed." + }, + { + "status": 236, + "message": "Input file was corrupted after user's modifications." + }, + { + "status": 345, + "message": "Convergence scf workflow failed." + }, + { + "status": 451, + "message": "Convergence orbcontrol workflow failed." 
+ }, + { + "status": 452, + "message": "CF calculation failed." + } + ] + }, + "class": "aiida_fleur.workflows.cfcoeff:FleurCFCoeffWorkChain" + }, + "fleur.corehole": { + "description": [ + "Turn key solution for a corehole calculation with the FLEUR code.", + " Has different protocols for different core-hole types (valence, charge).", + "", + " Calculates supercells. Extracts binding energies", + " for certain corelevels from the total energy differences of the calculation with", + " corehole and without.", + "", + " Documentation:", + " See help for details.", + "", + " Two paths are possible:", + "", + " (1) Start from a structure -> workchains run inpgen first (recommended)", + " (2) Start from a Fleurinp data object", + "", + " Also it is recommended to provide a calc parameter node for the structure", + "", + " :param wf_parameters: Dict node, specify, resources and what should be calculated", + " :param structure: structureData node, crystal structure", + " :param calc_parameters: Dict node, inpgen parameters for the crystal structure", + " :param fleurinp: fleurinpData node,", + " :param inpgen: Code node,", + " :param fleur: Code node,", + "", + " :return: output_corehole_wc_para Dict node, successful=True if no error", + "", + " :uses workchains: fleur_scf_wc, fleur_relax_wc", + " :uses calcfunctions: supercell, create_corehole_result_node, prepare_struc_corehole_wf" + ], + "spec": { + "inputs": [ + { + "name": "fleur", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "inpgen", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "calc_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "fleurinp", + "required": false, + "valid_types": "FleurinpData, NoneType", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "options", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData, NoneType", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "output_corehole_wc_para", + "required": true, + "valid_types": "Dict", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 1, + "message": "The input resources are invalid." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 2, + "message": "Input resources are missing." + }, + { + "status": 3, + "message": "The code provided is invalid, or not of the right kind." + }, + { + "status": 4, + "message": "Inpgen calculation FAILED, check output" + }, + { + "status": 5, + "message": "Changing of the FLEURINP data went wrong, check log." + }, + { + "status": 6, + "message": "The FLEUR input file for the calculation did not validate." + }, + { + "status": 7, + "message": "At least one FLEUR calculation FAILED, check the output and log." + }, + { + "status": 8, + "message": "At least one FLEUR calculation did not/could not reach the desired convergence criteria, with the current parameters." + }, + { + "status": 9, + "message": "Something went wrong in the determination of what coreholes to calculate, probably the input format was not correct. Check log." + }, + { + "status": 10, + "message": "The process returned an invalid output."
+ }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_fleur.workflows.corehole:FleurCoreholeWorkChain" + }, + "fleur.create_magnetic": { + "description": [ + "This workflow creates relaxed magnetic film on a substrate." + ], + "spec": { + "inputs": [ + { + "name": "distance_suggestion", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "eos", + "required": false, + "valid_types": "Data", + "info": "" + }, + { + "name": "eos_output", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "interlayer_dist", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "optimized_structure", + "required": false, + "valid_types": "StructureData, NoneType", + "info": "" + }, + { + "name": "relax", + "required": false, + "valid_types": "Data", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "magnetic_structure", + "required": true, + "valid_types": "StructureData", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 230, + "message": "Invalid workchain parameters." + }, + { + "status": 231, + "message": "Invalid input configuration." + }, + { + "status": 380, + "message": "Specified substrate has to be bcc or fcc." + }, + { + "status": 382, + "message": "Relaxation calculation failed." + }, + { + "status": 383, + "message": "EOS WorkChain failed." + } + ] + }, + "class": "aiida_fleur.workflows.create_magnetic_film:FleurCreateMagneticWorkChain" + }, + "fleur.dmi": { + "description": [ + "This workflow calculates DMI energy dispersion of a structure." + ], + "spec": { + "inputs": [ + { + "name": "fleur", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "fleurinp", + "required": false, + "valid_types": "FleurinpData, NoneType", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "options", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "remote", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "" + }, + { + "name": "scf", + "required": false, + "valid_types": "Data", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "output_dmi_wc_para", + "required": true, + "valid_types": "Dict", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 230, + "message": "Invalid workchain parameters." + }, + { + "status": 231, + "message": "Invalid input configuration." 
+ }, + { + "status": 233, + "message": "Invalid code node specified, check inpgen and fleur code nodes." + }, + { + "status": 235, + "message": "Input file modification failed." + }, + { + "status": 236, + "message": "Input file was corrupted after user's modifications." + }, + { + "status": 334, + "message": "Reference calculation failed." + }, + { + "status": 335, + "message": "Found no reference calculation remote repository." + }, + { + "status": 336, + "message": "Force theorem calculation failed." + } + ] + }, + "class": "aiida_fleur.workflows.dmi:FleurDMIWorkChain" + }, + "fleur.dos": { + "description": [ + "DEPRECATED: Use FleurBandDosWorkChain instead (entrypoint fleur.banddos)", + " This workflow calculated a DOS from a Fleur calculation", + "", + " :Params: a Fleurcalculation node", + " :returns: Success, last result node, list with convergence behavior", + "", + " wf_parameters: { 'tria', 'nkpts', 'sigma', 'emin', 'emax'}", + " defaults : tria = True, nkpts = 800, sigma=0.005, emin= -0.3, emax = 0.8" + ], + "spec": { + "inputs": [ + { + "name": "fleur", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "calc_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "fleurinp", + "required": false, + "valid_types": "FleurinpData, NoneType", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "options", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "remote_data", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "" + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_fleur.workflows.dos:fleur_dos_wc" + }, + "fleur.eos": { + "description": [ + "This workflow calculates the equation of states of a structure.", + " Calculates several unit cells with different volumes.", + " A Birch_Murnaghan equation of states fit determines the Bulk modulus and the", + " groundstate volume of the cell.", + "", + " :params wf_parameters: Dict node, optional 'wf_parameters', protocol specifying parameter dict", + " :params structure: StructureData node, 'structure' crystal structure", + " :params calc_parameters: Dict node, optional 'calc_parameters' parameters for inpgen", + " :params inpgen: Code node,", + " :params fleur: Code node,", + "", + "", + " :return output_eos_wc_para: Dict node, contains relevant output information.", + " about general succeed, fit results and so on." 
+ ], + "spec": { + "inputs": [ + { + "name": "scf", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "output_eos_wc_para", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "output_eos_wc_structure", + "required": true, + "valid_types": "StructureData", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 230, + "message": "Invalid workchain parameters." + }, + { + "status": 400, + "message": "At least one of the SCF sub processes did not finish successfully." + } + ] + }, + "class": "aiida_fleur.workflows.eos:FleurEosWorkChain" + }, + "fleur.init_cls": { + "description": [ + "Turn key solution for the calculation of core level shift" + ], + "spec": { + "inputs": [ + { + "name": "fleur", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "calc_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "fleurinp", + "required": false, + "valid_types": "FleurinpData, NoneType", + "info": "" + }, + { + "name": "inpgen", + "required": false, + "valid_types": "Code, NoneType", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "options", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData, NoneType", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "output_initial_cls_wc_para", + "required": true, + "valid_types": "Dict", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_fleur.workflows.initial_cls:FleurInitialCLSWorkChain" + }, + "fleur.mae": { + "description": [ + "This workflow calculates the Magnetic Anisotropy Energy of a structure." 
+ ], + "spec": { + "inputs": [ + { + "name": "fleur", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "fleurinp", + "required": false, + "valid_types": "FleurinpData, NoneType", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "options", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "remote", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "" + }, + { + "name": "scf", + "required": false, + "valid_types": "Data", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "output_mae_wc_para", + "required": true, + "valid_types": "Dict", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 230, + "message": "Invalid workchain parameters." + }, + { + "status": 231, + "message": "Invalid input configuration." + }, + { + "status": 233, + "message": "Invalid code node specified, check inpgen and fleur code nodes." + }, + { + "status": 235, + "message": "Input file modification failed." + }, + { + "status": 236, + "message": "Input file was corrupted after user's modifications." + }, + { + "status": 334, + "message": "Reference calculation failed." + }, + { + "status": 335, + "message": "Found no reference calculation remote repository." + }, + { + "status": 336, + "message": "Force theorem calculation failed." + } + ] + }, + "class": "aiida_fleur.workflows.mae:FleurMaeWorkChain" + }, + "fleur.mae_conv": { + "description": [ + "This workflow calculates the Magnetic Anisotropy Energy of a structure." + ], + "spec": { + "inputs": [ + { + "name": "scf", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "output_mae_conv_wc_para", + "required": true, + "valid_types": "Dict", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 230, + "message": "Invalid workchain parameters." + }, + { + "status": 343, + "message": "Convergence MAE calculation failed for all SQAs." + }, + { + "status": 344, + "message": "Convergence MAE calculation failed for some SQAs." + } + ] + }, + "class": "aiida_fleur.workflows.mae_conv:FleurMaeConvWorkChain" + }, + "fleur.orbcontrol": { + "description": [ + "Workchain for determining the groundstate density matrix in an DFT+U", + " calculation. This is done in 2 or 3 steps:", + "", + " 1. Converge the system without DFT+U (a converged calculation can be", + " provided to skip this step)", + " 2. 
A fixed number of iterations is run with fixed density matrices", + " either generated as all distinct permutations for the given occupations", + " or the explicitly given configurations", + " 3. The system and density matrix is relaxed", + "", + " :param wf_parameters: (Dict), Workchain Specifications", + " :param scf_no_ldau: (Dict), Inputs to a FleurScfWorkChain providing the initial system", + " either converged or starting from a structure", + " :param scf_with_ldau: (Dict), Inputs to a FleurScfWorkChain. Only the wf_parameters are valid", + " :param fleurinp: (FleurinpData) FleurinpData to start from if no SCF should be done", + " :param remote: (RemoteData) RemoteData to start from if no SCF should be done", + " :param structure: (StructureData) Structure to start from if no SCF should be done", + " :param calc_parameters: (Dict), Inpgen Parameters", + " :param settings: (Dict), additional settings for e.g. retrieving files", + " :param options: (Dict), Options for the submission of the jobs", + " :param inpgen: (Code)", + " :param fleur: (Code)" + ], + "spec": { + "inputs": [ + { + "name": "fleur", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "calc_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "fixed_remotes", + "required": false, + "valid_types": "RemoteData", + "info": "" + }, + { + "name": "fleurinp", + "required": false, + "valid_types": "FleurinpData, NoneType", + "info": "" + }, + { + "name": "inpgen", + "required": false, + "valid_types": "Code, NoneType", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "options", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "options_inpgen", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "relaxed_remotes", + "required": false, + "valid_types": "RemoteData", + "info": "" + }, + { + "name": "remote", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "" + }, + { + "name": "scf_no_ldau", + "required": false, + "valid_types": "Data", + "info": "Inputs for SCF Workchain before adding LDA+U" + }, + { + "name": "scf_with_ldau", + "required": false, + "valid_types": "Data", + "info": "Inputs for SCF Workchain after the LDA+U matrix was fixed" + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "settings_inpgen", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData, NoneType", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "groundstate_scf", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "output_orbcontrol_wc_para", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "groundstate_denmat", + "required": false, + "valid_types": "SinglefileData", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output."
+ }, + { + "status": 230, + "message": "Invalid workchain parameters." + }, + { + "status": 231, + "message": "Invalid input configuration." + }, + { + "status": 233, + "message": "Input codes do not correspond to fleur or inpgen respectively." + }, + { + "status": 235, + "message": "Input file modification failed." + }, + { + "status": 236, + "message": "Input file was corrupted after user's modifications." + }, + { + "status": 342, + "message": "Convergence LDA+U calculation failed for some Initial configurations." + }, + { + "status": 343, + "message": "Convergence LDA+U calculation failed for all Initial configurations." + }, + { + "status": 360, + "message": "Inpgen calculation failed." + }, + { + "status": 450, + "message": "Convergence workflow without LDA+U failed." + } + ] + }, + "class": "aiida_fleur.workflows.orbcontrol:FleurOrbControlWorkChain" + }, + "fleur.relax": { + "description": [ + "This workflow performs structure optimization." + ], + "spec": { + "inputs": [ + { + "name": "scf", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "final_scf", + "required": false, + "valid_types": "Data", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "last_scf", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "optimized_structure", + "required": true, + "valid_types": "StructureData", + "info": "" + }, + { + "name": "output_relax_wc_para", + "required": true, + "valid_types": "Dict", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 230, + "message": "Invalid workchain parameters." + }, + { + "status": 231, + "message": "If you want to run a final scf inpgen has to be there." + }, + { + "status": 311, + "message": "FLEUR calculation failed because an atom spilled to thevacuum during relaxation" + }, + { + "status": 313, + "message": "Overlapping MT-spheres during relaxation." + }, + { + "status": 350, + "message": "Optimization cycle did not lead to convergence of forces." + }, + { + "status": 351, + "message": "SCF Workchains failed for some reason." + }, + { + "status": 352, + "message": "Found no relaxed structure info in the output of SCF" + }, + { + "status": 353, + "message": "Found no SCF output" + }, + { + "status": 354, + "message": "Force is small, switch to BFGS" + } + ] + }, + "class": "aiida_fleur.workflows.relax:FleurRelaxWorkChain" + }, + "fleur.relax_torque": { + "description": [ + "This workflow performs spin structure optimization." 
+ ], + "spec": { + "inputs": [ + { + "name": "scf", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "final_scf", + "required": false, + "valid_types": "Data", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "output_relax_torque_wc_para", + "required": true, + "valid_types": "Dict", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 230, + "message": "Invalid workchain parameters." + }, + { + "status": 231, + "message": "If you want to run a final scf inpgen has to be there." + }, + { + "status": 350, + "message": "Optimization cycle did not lead to convergence." + }, + { + "status": 351, + "message": "An SCF Workchain failed for some reason." + } + ] + }, + "class": "aiida_fleur.workflows.relax_torque:FleurRelaxTorqueWorkChain" + }, + "fleur.scf": { + "description": [ + "Workchain for converging a FLEUR calculation (SCF).", + "", + " It converges the charge density, total energy or the largest force.", + " Two paths are possible:", + "", + " (1) Start from a structure and run the inpgen first optional with calc_parameters", + " (2) Start from a Fleur calculation, with optional remoteData", + "", + " :param wf_parameters: (Dict), Workchain Specifications", + " :param structure: (StructureData), Crystal structure", + " :param calc_parameters: (Dict), Inpgen Parameters", + " :param fleurinp: (FleurinpData), to start with a Fleur calculation", + " :param remote_data: (RemoteData), from a Fleur calculation", + " :param inpgen: (Code)", + " :param fleur: (Code)", + "", + " :return: output_scf_wc_para (Dict), Information of workflow results", + " like Success, last result node, list with convergence behavior" + ], + "spec": { + "inputs": [ + { + "name": "fleur", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "calc_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "fleurinp", + "required": false, + "valid_types": "FleurinpData, NoneType", + "info": "" + }, + { + "name": "inpgen", + "required": false, + "valid_types": "Code, NoneType", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "options", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "remote_data", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "" + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "settings_inpgen", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData, NoneType", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "fleurinp", + "required": true, + "valid_types": "FleurinpData", + "info": "" + }, + { + "name": "last_calc", + "required": true, + "valid_types": "", + "info": "" + }, + { + 
"name": "output_scf_wc_para", + "required": true, + "valid_types": "Dict", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 230, + "message": "Invalid workchain parameters." + }, + { + "status": 231, + "message": "Invalid input configuration." + }, + { + "status": 233, + "message": "Input codes do not correspond to fleur or inpgen respectively." + }, + { + "status": 235, + "message": "Input file modification failed." + }, + { + "status": 236, + "message": "Input file was corrupted after user's modifications." + }, + { + "status": 360, + "message": "Inpgen calculation failed." + }, + { + "status": 361, + "message": "Fleur calculation failed." + }, + { + "status": 362, + "message": "SCF cycle did not lead to convergence." + } + ] + }, + "class": "aiida_fleur.workflows.scf:FleurScfWorkChain" + }, + "fleur.ssdisp": { + "description": [ + "This workflow calculates spin spiral dispersion of a structure." + ], + "spec": { + "inputs": [ + { + "name": "fleur", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "fleurinp", + "required": false, + "valid_types": "FleurinpData, NoneType", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "options", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "remote", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "" + }, + { + "name": "scf", + "required": false, + "valid_types": "Data", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "output_ssdisp_wc_para", + "required": true, + "valid_types": "Dict", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 230, + "message": "Invalid workchain parameters." + }, + { + "status": 231, + "message": "Invalid input configuration." + }, + { + "status": 233, + "message": "Invalid code node specified, check inpgen and fleur code nodes." + }, + { + "status": 235, + "message": "Input file modification failed." + }, + { + "status": 236, + "message": "Input file was corrupted after user's modifications." + }, + { + "status": 334, + "message": "Reference calculation failed." + }, + { + "status": 335, + "message": "Found no reference calculation remote repository." + }, + { + "status": 336, + "message": "Force theorem calculation failed." + } + ] + }, + "class": "aiida_fleur.workflows.ssdisp:FleurSSDispWorkChain" + }, + "fleur.ssdisp_conv": { + "description": [ + "This workflow calculates the Spin Spiral Dispersion of a structure." 
+ ], + "spec": { + "inputs": [ + { + "name": "scf", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "output_ssdisp_conv_wc_para", + "required": true, + "valid_types": "Dict", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 230, + "message": "Invalid workchain parameters." + }, + { + "status": 340, + "message": "Convergence SSDisp calculation failed for all q-vectors." + }, + { + "status": 341, + "message": "Convergence SSDisp calculation failed for some q-vectors." + } + ] + }, + "class": "aiida_fleur.workflows.ssdisp_conv:FleurSSDispConvWorkChain" + }, + "fleur.strain": { + "description": [ + "This workflow calculates the deformation potential a structure = -BdEg/dP = d(Eg)/d(ln(V)).", + " Calculates several unit cells with different volumes.", + " A Birch_Murnaghan equation of states fit determines the Bulk modulus(B) and the", + " ground-state volume of the cell.", + "", + " :params wf_parameters: Dict node, optional 'wf_parameters', protocol specifying parameter dict", + " :params structure: StructureData node, 'structure' crystal structure", + " :params calc_parameters: Dict node, optional 'calc_parameters' parameters for inpgen", + " :params inpgen: Code node,", + " :params fleur: Code node,", + "", + "", + " :return output_strain_wc_para: Dict node, contains relevant output information.", + " about general succeed, fit results and so on." + ], + "spec": { + "inputs": [ + { + "name": "fleur", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "inpgen", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "" + }, + { + "name": "calc_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "options", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "wf_parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "output_strain_wc_para", + "required": true, + "valid_types": "Dict", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 331, + "message": "Invalid code node specified, check inpgen and fleur code nodes." 
+ } + ] + }, + "class": "aiida_fleur.workflows.strain:FleurStrainWorkChain" + } + }, + "console_scripts": { + "aiida-fleur": "aiida_fleur.cmdline:cmd_root" + } + }, + "commits_count": 202, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 2 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 2 + }, + { + "colorclass": "red", + "text": "Data", + "count": 1 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 19 + }, + { + "colorclass": "purple", + "text": "Console scripts", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-fleur", + "is_installable": "True" + }, + "aiida-flexpart": { + "code_home": "https://github.com/aiidaplugins/aiida-flexpart", + "entry_point_prefix": "flexpart", + "pip_url": "git+https://github.com/aiidaplugins/aiida-flexpart", + "name": "aiida-flexpart", + "package_name": "aiida_flexpart", + "hosted_on": "github.com", + "metadata": { + "author": "The AiiDA Team", + "author_email": "aliaksandr.yakutovich@empa.ch", + "version": "0.1.0a0", + "description": "AiiDA plugin for the FLEXPART code (simulation of atmospheric transport processes).", + "classifiers": [ + "Programming Language :: Python", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Framework :: AiiDA" + ] + }, + "aiida_version": ">=1.6.5,<3.0.0", + "entry_points": { + "aiida.calculations": { + "flexpart.cosmo": "aiida_flexpart.calculations.cosmo:FlexpartCosmoCalculation" + }, + "aiida.parsers": { + "flexpart.cosmo": "aiida_flexpart.parsers.cosmo:FlexpartCosmoParser" + }, + "aiida.workflows": { + "flexpart.multi_dates": "aiida_flexpart.workflows.multi_dates_workflow:FlexpartMultipleDatesWorkflow" + } + }, + "commits_count": 0, + "development_status": "planning", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/aiidaplugins/aiida-flexpart" + }, + "aiida-gaussian": { + "code_home": "https://github.com/nanotech-empa/aiida-gaussian", + "entry_point_prefix": "gaussian", + "pip_url": "aiida-gaussian", + "plugin_info": "https://raw.githubusercontent.com/nanotech-empa/aiida-gaussian/master/pyproject.toml", + "name": "aiida-gaussian", + "package_name": "aiida_gaussian", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin for the Gaussian quantum chemistry software.", + "author": "Kristjan Eimre, Pezhman Zarabadi-Poor, Aliaksandr Yakutovich", + "license": "MIT", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + "Topic :: Scientific/Engineering :: Chemistry", + "Topic :: Scientific/Engineering :: Physics", + "Topic :: Software Development :: Libraries :: Python Modules" + ], + "version": "2.0.4" + }, + "aiida_version": ">=2.0.0,<3.0.0", + "entry_points": { + "aiida.calculations": { + "gaussian": "aiida_gaussian.calculations:GaussianCalculation", + "gaussian.cubegen": "aiida_gaussian.calculations:CubegenCalculation", + "gaussian.formchk": "aiida_gaussian.calculations:FormchkCalculation" + }, + "aiida.parsers": { + "gaussian.advanced": 
"aiida_gaussian.parsers.gaussian:GaussianAdvancedParser", + "gaussian.base": "aiida_gaussian.parsers.gaussian:GaussianBaseParser", + "gaussian.cubegen_base": "aiida_gaussian.parsers.cubegen:CubegenBaseParser" + }, + "aiida.workflows": { + "gaussian.base": "aiida_gaussian.workchains:GaussianBaseWorkChain", + "gaussian.cubes": "aiida_gaussian.workchains:GaussianCubesWorkChain" + } + }, + "commits_count": 20, + "development_status": "stable", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 3 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 3 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 2 + } + ], + "pip_install_cmd": "pip install aiida-gaussian", + "is_installable": "False" + }, + "aiida-gaussian-datatypes": { + "code_home": "https://github.com/dev-zero/aiida-gaussian-datatypes", + "documentation_url": "https://github.com/dev-zero/aiida-gaussian-datatypes/blob/master/README.md", + "entry_point_prefix": "gaussian", + "pip_url": "aiida-gaussian-datatypes", + "plugin_info": "https://raw.github.com/dev-zero/aiida-gaussian-datatypes/master/setup.json", + "name": "aiida-gaussian-datatypes", + "package_name": "aiida_gaussian_datatypes", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA data plugin to manage gaussian datatypes (basis sets and pseudopotentials) as first-class citizens", + "author": "Tiziano M\u00fcller", + "author_email": "tiziano.mueller@chem.uzh.ch", + "license": "MIT License", + "home_page": "https://github.com/dev-zero/aiida-gaussian-datatypes", + "classifiers": [ + "Development Status :: 4 - Beta", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering :: Chemistry", + "Topic :: Software Development :: Libraries :: Python Modules" + ], + "version": "0.5.1" + }, + "aiida_version": ">=1.6.2", + "entry_points": { + "aiida.cmdline.data": { + "gaussian.basisset": "aiida_gaussian_datatypes.basisset.cli:cli", + "gaussian.pseudo": "aiida_gaussian_datatypes.pseudopotential.cli:cli" + }, + "aiida.data": { + "gaussian.basisset": "aiida_gaussian_datatypes.basisset.data:BasisSet", + "gaussian.pseudo": "aiida_gaussian_datatypes.pseudopotential.data:Pseudopotential" + }, + "aiida.groups": { + "gaussian.basisset": "aiida_gaussian_datatypes.groups:BasisSetGroup", + "gaussian.pseudo": "aiida_gaussian_datatypes.groups:PseudopotentialGroup" + } + }, + "commits_count": 0, + "development_status": "beta", + "summaryinfo": [ + { + "colorclass": "red", + "text": "Data", + "count": 2 + }, + { + "colorclass": "orange", + "text": "Other (Data commands, Groups)", + "count": 4 + } + ], + "pip_install_cmd": "pip install aiida-gaussian-datatypes", + "is_installable": "True" + }, + "aiida-gollum": { + "code_home": "https://github.com/garsua/aiida-gollum/", + "documentation_url": "https://aiida-gollum.readthedocs.io/", + "entry_point_prefix": "gollum", + "pip_url": "git+https://github.com/garsua/aiida-gollum", + "name": "aiida-gollum", + "package_name": "aiida_gollum", + "hosted_on": "github.com", + "metadata": { + "author": "Victor M. 
Garcia-Suarez", + "author_email": "vm.garcia@cinn.es", + "version": "0.12.0", + "description": "A plugin for Gollum functionality within AiiDA framework.", + "classifiers": [ + "License :: OSI Approved :: MIT License", + "Framework :: AiiDA", + "Programming Language :: Python :: 2.7", + "Development Status :: 1 - Alpha" + ] + }, + "aiida_version": ">=0.12.0", + "entry_points": { + "aiida.calculations": { + "gollum.gollum": "aiida_gollum.calculations.gollum:GollumCalculation" + }, + "aiida.parsers": { + "gollum.parser": "aiida_gollum.parsers.gollum:GollumParser" + } + }, + "commits_count": 0, + "development_status": "planning", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/garsua/aiida-gollum" + }, + "aiida-graphql": { + "code_home": "https://github.com/dev-zero/aiida-graphql", + "entry_point_prefix": "graphql", + "pip_url": "aiida-graphql", + "name": "aiida-graphql", + "package_name": "aiida_graphql", + "hosted_on": "github.com", + "metadata": { + "description": "Strawberry-based GraphQL API Server for AiiDA", + "author": "Tiziano M\u00fcller", + "author_email": "tiziano.mueller@chem.uzh.ch", + "license": "MIT", + "home_page": "https://github.com/dev-zero/aiida-graphql", + "classifiers": [ + "Development Status :: 3 - Alpha", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Topic :: Software Development :: Libraries :: Python Modules" + ], + "version": "0.0.2" + }, + "aiida_version": ">=1.0.0b6,<2.0.0", + "entry_points": {}, + "commits_count": 0, + "development_status": "alpha", + "summaryinfo": [], + "pip_install_cmd": "pip install aiida-graphql", + "is_installable": "True" + }, + "aiida-gromacs": { + "code_home": "https://github.com/jimboid/aiida-gromacs", + "documentation_url": "https://aiida-gromacs.readthedocs.io/", + "entry_point_prefix": "gromacs", + "pip_url": "git+https://github.com/jimboid/aiida-gromacs", + "name": "aiida-gromacs", + "package_name": "aiida_gromacs", + "hosted_on": "github.com", + "metadata": { + "description": "A plugin for using GROMACS with AiiDA for molecular dymanics simulations.", + "classifiers": [ + "Programming Language :: Python", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Development Status :: 3 - Alpha", + "Framework :: AiiDA" + ], + "author": "James Gebbie-Rayet", + "author_email": "james.gebbie@stfc.ac.uk" + }, + "aiida_version": ">=2.0,<3", + "entry_points": { + "aiida.data": { + "gromacs.pdb2gmx": "aiida_gromacs.data.pdb2gmx:Pdb2gmxParameters", + "gromacs.editconf": "aiida_gromacs.data.editconf:EditconfParameters", + "gromacs.genion": "aiida_gromacs.data.genion:GenionParameters", + "gromacs.grompp": "aiida_gromacs.data.grompp:GromppParameters", + "gromacs.mdrun": "aiida_gromacs.data.mdrun:MdrunParameters", + "gromacs.solvate": "aiida_gromacs.data.solvate:SolvateParameters" + }, + "aiida.calculations": { + "gromacs.pdb2gmx": { + "description": [ + "AiiDA calculation plugin wrapping the 'gmx pdb2gmx' executable.", + "", + " AiiDA plugin wrapper for converting PDB files to GRO files." 
+ ], + "spec": { + "inputs": [ + { + "name": "parameters", + "required": true, + "valid_types": "Pdb2gmxParameters", + "info": "Command line parameters for gmx pdb2gmx" + }, + { + "name": "pdbfile", + "required": true, + "valid_types": "SinglefileData", + "info": "Input structure." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "grofile", + "required": true, + "valid_types": "SinglefileData", + "info": "Output forcefield compliant file." + }, + { + "name": "itpfile", + "required": true, + "valid_types": "SinglefileData", + "info": "Output forcefield compliant file." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "stdout", + "required": true, + "valid_types": "SinglefileData", + "info": "stdout" + }, + { + "name": "topfile", + "required": true, + "valid_types": "SinglefileData", + "info": "Output forcefield compliant file." + }, + { + "name": "n_file", + "required": false, + "valid_types": "SinglefileData", + "info": "Output index file" + }, + { + "name": "q_file", + "required": false, + "valid_types": "SinglefileData", + "info": "Output Structure file" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." 
+ }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Calculation did not produce all expected output files." + } + ] + }, + "class": "aiida_gromacs.calculations.pdb2gmx:Pdb2gmxCalculation" + }, + "gromacs.editconf": { + "description": [ + "AiiDA calculation plugin wrapping the 'gmx editconf' executable.", + "", + " AiiDA plugin wrapper for adding a simulation box to structure file." + ], + "spec": { + "inputs": [ + { + "name": "grofile", + "required": true, + "valid_types": "SinglefileData", + "info": "Input structure file." + }, + { + "name": "parameters", + "required": true, + "valid_types": "EditconfParameters", + "info": "Command line parameters for gmx editconf." + }, + { + "name": "bf_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Generic data file." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "n_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Index file." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "grofile", + "required": true, + "valid_types": "SinglefileData", + "info": "Output file containing simulation box." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "stdout", + "required": true, + "valid_types": "SinglefileData", + "info": "stdout" + }, + { + "name": "mead_file", + "required": false, + "valid_types": "SinglefileData", + "info": "Coordination file for MEAD" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." 
+ }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Calculation did not produce all expected output files." + } + ] + }, + "class": "aiida_gromacs.calculations.editconf:EditconfCalculation" + }, + "gromacs.genion": { + "description": [ + "AiiDA calculation plugin wrapping the 'gmx genion' executable.", + "", + " AiiDA plugin wrapper for converting PDB files to GRO files." + ], + "spec": { + "inputs": [ + { + "name": "parameters", + "required": true, + "valid_types": "GenionParameters", + "info": "Command line parameters for gmx genion" + }, + { + "name": "topfile", + "required": true, + "valid_types": "SinglefileData", + "info": "Input topology file." + }, + { + "name": "tprfile", + "required": true, + "valid_types": "SinglefileData", + "info": "Input tpr file." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "n_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Index file." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "grofile", + "required": true, + "valid_types": "SinglefileData", + "info": "Output gro file with ions added." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "stdout", + "required": true, + "valid_types": "SinglefileData", + "info": "stdout" + }, + { + "name": "topfile", + "required": true, + "valid_types": "SinglefileData", + "info": "Output topology with ions added." 
+ }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Calculation did not produce all expected output files." + } + ] + }, + "class": "aiida_gromacs.calculations.genion:GenionCalculation" + }, + "gromacs.grompp": { + "description": [ + "AiiDA calculation plugin wrapping the 'gmx grompp' executable.", + "", + " AiiDA plugin wrapper for converting PDB files to GRO files." + ], + "spec": { + "inputs": [ + { + "name": "grofile", + "required": true, + "valid_types": "SinglefileData", + "info": "Input structure" + }, + { + "name": "mdpfile", + "required": true, + "valid_types": "SinglefileData", + "info": "grompp run file." + }, + { + "name": "parameters", + "required": true, + "valid_types": "GromppParameters", + "info": "Command line parameters for gmx grompp" + }, + { + "name": "topfile", + "required": true, + "valid_types": "SinglefileData", + "info": "Input topology" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "e_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Energy file" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "n_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Index file" + }, + { + "name": "qmi_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "QM input file" + }, + { + "name": "r_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Structure file" + }, + { + "name": "rb_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Structure file" + }, + { + "name": "ref_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Full precision trajectory file" + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. 
The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "t_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Full precision trajectory file" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "stdout", + "required": true, + "valid_types": "SinglefileData", + "info": "stdout" + }, + { + "name": "tprfile", + "required": true, + "valid_types": "SinglefileData", + "info": "Output gro file ready for adding ions." + }, + { + "name": "imd_file", + "required": false, + "valid_types": "SinglefileData", + "info": "Coordinate file in Gromos-87 format" + }, + { + "name": "po_file", + "required": false, + "valid_types": "SinglefileData", + "info": "grompp input file with MD parameters" + }, + { + "name": "pp_file", + "required": false, + "valid_types": "SinglefileData", + "info": "Topology file" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Calculation did not produce all expected output files." + } + ] + }, + "class": "aiida_gromacs.calculations.grompp:GromppCalculation" + }, + "gromacs.mdrun": { + "description": [ + "AiiDA calculation plugin wrapping the 'gmx mdrun' executable.", + "", + " AiiDA plugin wrapper for converting PDB files to GRO files." + ], + "spec": { + "inputs": [ + { + "name": "parameters", + "required": true, + "valid_types": "MdrunParameters", + "info": "Command line parameters for gmx mdrun" + }, + { + "name": "tprfile", + "required": true, + "valid_types": "SinglefileData", + "info": "Input structure." + }, + { + "name": "awh_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "xvgr/xmgr file" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. 
This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "cpi_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Checkpoint file" + }, + { + "name": "ei_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "ED sampling input" + }, + { + "name": "membed_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Generic data file" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "mn_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Index file" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "mp_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Topology file" + }, + { + "name": "multidir_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Run directory" + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "rerun_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Trajectory: xtc trr cpt gro g96 pdb tng" + }, + { + "name": "table_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "xvgr/xmgr file" + }, + { + "name": "tableb_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "xvgr/xmgr file" + }, + { + "name": "tablep_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "xvgr/xmgr file" + } + ], + "outputs": [ + { + "name": "enfile", + "required": true, + "valid_types": "SinglefileData", + "info": "Output energy file." + }, + { + "name": "grofile", + "required": true, + "valid_types": "SinglefileData", + "info": "Output structure file." + }, + { + "name": "logfile", + "required": true, + "valid_types": "SinglefileData", + "info": "Output log file." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "stdout", + "required": true, + "valid_types": "SinglefileData", + "info": "stdout" + }, + { + "name": "trrfile", + "required": true, + "valid_types": "SinglefileData", + "info": "Output trajectory." + }, + { + "name": "cptfile", + "required": false, + "valid_types": "SinglefileData", + "info": "Checkpoint file." 
+ }, + { + "name": "dhdl_file", + "required": false, + "valid_types": "SinglefileData", + "info": "xvgr/xmgr file" + }, + { + "name": "eo_file", + "required": false, + "valid_types": "SinglefileData", + "info": "xvgr/xmgr file" + }, + { + "name": "field_file", + "required": false, + "valid_types": "SinglefileData", + "info": "xvgr/xmgr file" + }, + { + "name": "if_file", + "required": false, + "valid_types": "SinglefileData", + "info": "xvgr/xmgr file" + }, + { + "name": "mtx_file", + "required": false, + "valid_types": "SinglefileData", + "info": "Hessian Matrix" + }, + { + "name": "pf_file", + "required": false, + "valid_types": "SinglefileData", + "info": "xvgr/xmgr file" + }, + { + "name": "px_file", + "required": false, + "valid_types": "SinglefileData", + "info": "xvgr/xmgr file" + }, + { + "name": "ra_file", + "required": false, + "valid_types": "SinglefileData", + "info": "Log file" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "ro_file", + "required": false, + "valid_types": "SinglefileData", + "info": "xvgr/xmgr file" + }, + { + "name": "rs_file", + "required": false, + "valid_types": "SinglefileData", + "info": "Log file" + }, + { + "name": "rt_file", + "required": false, + "valid_types": "SinglefileData", + "info": "Log file" + }, + { + "name": "swap_file", + "required": false, + "valid_types": "SinglefileData", + "info": "xvgr/xmgr file" + }, + { + "name": "tpi_file", + "required": false, + "valid_types": "SinglefileData", + "info": "xvgr/xmgr file" + }, + { + "name": "tpid_file", + "required": false, + "valid_types": "SinglefileData", + "info": "xvgr/xmgr file" + }, + { + "name": "x_file", + "required": false, + "valid_types": "SinglefileData", + "info": "Compressed trajectory (tng format or portable xdr format)" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Calculation did not produce all expected output files." + } + ] + }, + "class": "aiida_gromacs.calculations.mdrun:MdrunCalculation" + }, + "gromacs.solvate": { + "description": [ + "AiiDA calculation plugin wrapping the 'gmx solvate' executable.", + "", + " AiiDA plugin wrapper for solvating a molecular system." + ], + "spec": { + "inputs": [ + { + "name": "grofile", + "required": true, + "valid_types": "SinglefileData", + "info": "Input structure" + }, + { + "name": "parameters", + "required": true, + "valid_types": "SolvateParameters", + "info": "Command line parameters for gmx solvate." 
+ }, + { + "name": "topfile", + "required": true, + "valid_types": "SinglefileData", + "info": "Input topology" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "grofile", + "required": true, + "valid_types": "SinglefileData", + "info": "Output solvated gro file." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "stdout", + "required": true, + "valid_types": "SinglefileData", + "info": "stdout" + }, + { + "name": "topfile", + "required": true, + "valid_types": "SinglefileData", + "info": "Output topology file." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Calculation did not produce all expected output files." + } + ] + }, + "class": "aiida_gromacs.calculations.solvate:SolvateCalculation" + }, + "general-MD": { + "description": [ + "AiiDA calculation plugin wrapping an executable with user defined", + " input and output files." 
+ ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "command", + "required": false, + "valid_types": "Str, NoneType", + "info": "The command used to execute the job." + }, + { + "name": "input_files", + "required": false, + "valid_types": "SinglefileData", + "info": "Dictionary of input files." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "output_files", + "required": false, + "valid_types": "List, NoneType", + "info": "List of output file names." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "log", + "required": false, + "valid_types": "SinglefileData", + "info": "link to the default file.out." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Calculation did not produce all expected output files." + }, + { + "status": 301, + "message": "Specified output file not produced by command." 
+ } + ] + }, + "class": "aiida_gromacs.calculations.generalMD:GeneralCalculation" + } + }, + "aiida.parsers": { + "gromacs.pdb2gmx": "aiida_gromacs.parsers.pdb2gmx:Pdb2gmxParser", + "gromacs.editconf": "aiida_gromacs.parsers.editconf:EditconfParser", + "gromacs.genion": "aiida_gromacs.parsers.genion:GenionParser", + "gromacs.grompp": "aiida_gromacs.parsers.grompp:GromppParser", + "gromacs.mdrun": "aiida_gromacs.parsers.mdrun:MdrunParser", + "gromacs.solvate": "aiida_gromacs.parsers.solvate:SolvateParser", + "general-MD": "aiida_gromacs.parsers.generalMD:GeneralParser" + }, + "aiida.workflows": { + "gromacs.setup": { + "description": [ + "WorkChain for setting up a gromacs simulation automatically." + ], + "spec": { + "inputs": [ + { + "name": "editconfparameters", + "required": true, + "valid_types": "EditconfParameters", + "info": "Command line parameters for gmx editconf" + }, + { + "name": "genionparameters", + "required": true, + "valid_types": "GenionParameters", + "info": "Command line parameters for gmx genion" + }, + { + "name": "gromppionsparameters", + "required": true, + "valid_types": "GromppParameters", + "info": "Command line parameters for gmx grompp" + }, + { + "name": "gromppminparameters", + "required": true, + "valid_types": "GromppParameters", + "info": "Command line parameters for gmx grompp minimisation run" + }, + { + "name": "gromppnptparameters", + "required": true, + "valid_types": "GromppParameters", + "info": "Command line parameters for gmx grompp npt equilibration run" + }, + { + "name": "gromppnvtparameters", + "required": true, + "valid_types": "GromppParameters", + "info": "Command line parameters for gmx grompp nvt equilibration run" + }, + { + "name": "gromppprodparameters", + "required": true, + "valid_types": "GromppParameters", + "info": "Command line parameters for gmx grompp production run" + }, + { + "name": "ionsmdp", + "required": true, + "valid_types": "SinglefileData", + "info": "MD parameters for adding ions." + }, + { + "name": "local_code", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "mdrunparameters", + "required": true, + "valid_types": "MdrunParameters", + "info": "Command line parameters for gmx mdrun production run" + }, + { + "name": "minimiseparameters", + "required": true, + "valid_types": "MdrunParameters", + "info": "Command line parameters for gmx mdrun minimisation run" + }, + { + "name": "minmdp", + "required": true, + "valid_types": "SinglefileData", + "info": "MD parameters for minimisation." + }, + { + "name": "nptmdp", + "required": true, + "valid_types": "SinglefileData", + "info": "MD parameters for NPT equilibration." + }, + { + "name": "nptparameters", + "required": true, + "valid_types": "MdrunParameters", + "info": "Command line parameters for gmx mdrun npt equilibration run" + }, + { + "name": "nvtmdp", + "required": true, + "valid_types": "SinglefileData", + "info": "MD parameters for NVT equilibration." + }, + { + "name": "nvtparameters", + "required": true, + "valid_types": "MdrunParameters", + "info": "Command line parameters for gmx mdrun nvt equilibration run" + }, + { + "name": "pdb2gmxparameters", + "required": true, + "valid_types": "Pdb2gmxParameters", + "info": "Command line parameters for gmx pdb2gmx" + }, + { + "name": "pdbfile", + "required": true, + "valid_types": "SinglefileData", + "info": "Input structure." + }, + { + "name": "prodmdp", + "required": true, + "valid_types": "SinglefileData", + "info": "MD parameters for production run." 
+ }, + { + "name": "solvateparameters", + "required": true, + "valid_types": "SolvateParameters", + "info": "Command line parameters for gmx solvate" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "remote_code", + "required": false, + "valid_types": "Code, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "result", + "required": true, + "valid_types": "", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_gromacs.workflows.simsetup:SetupWorkChain" + } + } + }, + "commits_count": 99, + "development_status": "alpha", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 7 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 7 + }, + { + "colorclass": "red", + "text": "Data", + "count": 6 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/jimboid/aiida-gromacs", + "is_installable": "True" + }, + "aiida-grouppathx": { + "code_home": "https://github.com/zhubonan/aiida-grouppathx", + "development_status": "beta", + "entry_point_prefix": "grouppathx", + "pip_url": "aiida-grouppathx", + "name": "aiida-grouppathx", + "package_name": "aiida_grouppathx", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin provides the GroupPathX class", + "author_email": "Bonan Zhu ", + "classifiers": [ + "Development Status :: 3 - Alpha", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python" + ], + "version": "0.2.0" + }, + "aiida_version": ">=1.6.4,<3", + "entry_points": { + "aiida.cmdline.data": { + "gpx": "aiida_grouppathx.cli:grouppathx_cli" + } + }, + "commits_count": 7, + "summaryinfo": [ + { + "colorclass": "orange", + "text": "Other (Data commands)", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-grouppathx", + "is_installable": "True" + }, + "aiida-gudhi": { + "code_home": "https://github.com/ltalirz/aiida-gudhi", + "development_status": "beta", + "entry_point_prefix": "gudhi", + "pip_url": "aiida-gudhi", + "plugin_info": "https://raw.github.com/ltalirz/aiida-gudhi/master/setup.json", + "name": "aiida-gudhi", + "package_name": "aiida_gudhi", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin for the [GUDHI](http://gudhi.gforge.inria.fr/) library for topological data analysis.", + "author": "Leopold Talirz", + "author_email": "leopold.talirz@gmail.com", + "license": "MIT", + "home_page": "https://github.com/ltalirz/aiida-gudhi", + "classifiers": [ + "Programming Language :: Python" + ], + "version": "0.1.0a3" + }, + "aiida_version": "*", + "entry_points": { + "aiida.calculations": { + "gudhi.rdm": "aiida_gudhi.calculations.rips:RipsDistanceMatrixCalculation" + }, + "aiida.data": { + "gudhi.rdm": "aiida_gudhi.data.rips:RipsDistanceMatrixParameters" + }, + "aiida.parsers": { + "gudhi.rdm": "aiida_gudhi.parsers.rips:RipsParser" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + 
"text": "Parsers", + "count": 1 + }, + { + "colorclass": "red", + "text": "Data", + "count": 1 + } + ], + "pip_install_cmd": "pip install --pre aiida-gudhi", + "is_installable": "True" + }, + "aiida-gulp": { + "code_home": "https://github.com/aiidaplugins/aiida-gulp", + "development_status": "beta", + "documentation_url": "https://aiida-gulp.readthedocs.io", + "entry_point_prefix": "gulp", + "pip_url": "aiida-gulp", + "plugin_info": "https://raw.githubusercontent.com/aiidaplugins/aiida-gulp/master/setup.json", + "name": "aiida-gulp", + "package_name": "aiida_gulp", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin for running the GULP MD code", + "author": "Chris Sewell", + "author_email": "chrisj_sewell@hotmail.com", + "license": "MIT", + "home_page": "https://github.com/chrisjsewell/aiida-gulp", + "classifiers": [ + "Framework :: AiiDA", + "Programming Language :: Python", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3.6", + "Topic :: Scientific/Engineering :: Chemistry", + "Topic :: Scientific/Engineering :: Physics" + ], + "version": "0.10.0b5" + }, + "aiida_version": "1.0.0b5", + "entry_points": { + "aiida.calculations": { + "gulp.fitting": "aiida_gulp.calculations.gulp_fitting:GulpFittingCalculation", + "gulp.optimize": "aiida_gulp.calculations.gulp_optimize:GulpOptCalculation", + "gulp.single": "aiida_gulp.calculations.gulp_single:GulpSingleCalculation" + }, + "aiida.cmdline.data": { + "gulp.potentials": "aiida_gulp.cmndline.potentials:potentials" + }, + "aiida.data": { + "gulp.potential": "aiida_gulp.data.potential:EmpiricalPotential", + "gulp.symmetry": "aiida_gulp.data.symmetry:SymmetryData" + }, + "aiida.parsers": { + "gulp.fitting": "aiida_gulp.parsers.parse_fitting:GulpFittingParser", + "gulp.optimize": "aiida_gulp.parsers.parse_opt:GulpOptParser", + "gulp.single": "aiida_gulp.parsers.parse_single:GulpSingleParser" + }, + "aiida.workflows": {}, + "console_scripts": { + "gulp_mock": "aiida_gulp.tests.mock_gulp:main" + }, + "gulp.potentials": { + "lj": "aiida_gulp.potentials.lj:PotentialWriterLJ", + "reaxff": "aiida_gulp.potentials.reaxff:PotentialWriterReaxff" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 3 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 3 + }, + { + "colorclass": "red", + "text": "Data", + "count": 2 + }, + { + "colorclass": "purple", + "text": "Console scripts", + "count": 1 + }, + { + "colorclass": "orange", + "text": "Other (Data commands, Gulp potentials)", + "count": 3 + } + ], + "pip_install_cmd": "pip install --pre aiida-gulp", + "is_installable": "True" + }, + "aiida-kkr": { + "code_home": "https://github.com/JuDFTteam/aiida-kkr/tree/develop", + "development_status": "stable", + "documentation_url": "https://aiida-kkr.readthedocs.io/", + "entry_point_prefix": "kkr", + "pip_url": "aiida-kkr", + "name": "aiida-kkr", + "package_name": "aiida_kkr", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin for the JuKKR codes", + "author_email": "Philipp Ruessmann , Jens Broeder , Fabian Bertoldo ", + "classifiers": [ + "Development Status :: 4 - Beta", + "Environment :: Plugins", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + 
"Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering :: Physics" + ], + "version": "2.0.0" + }, + "aiida_version": null, + "entry_points": {}, + "commits_count": 90, + "summaryinfo": [], + "pip_install_cmd": "pip install aiida-kkr", + "is_installable": "True" + }, + "aiida-lammps": { + "code_home": "https://github.com/aiidaplugins/aiida-lammps", + "development_status": "beta", + "entry_point_prefix": "lammps", + "pip_url": "git+https://github.com/aiidaplugins/aiida-lammps", + "name": "aiida-lammps", + "package_name": "aiida_lammps", + "hosted_on": "github.com", + "metadata": { + "author": "Abel Carreras, Chris Sewell", + "author_email": "chrisj_sewell@hotmail.com", + "version": "0.8.0", + "description": "AiiDA plugin for LAMMPS", + "classifiers": [ + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering :: Chemistry", + "Topic :: Scientific/Engineering :: Physics", + "Framework :: AiiDA" + ] + }, + "aiida_version": ">=1.4.0,<2.0.0", + "entry_points": { + "aiida.calculations": { + "lammps.combinate": "aiida_lammps.calculations.lammps.combinate:CombinateCalculation", + "lammps.force": "aiida_lammps.calculations.lammps.force:ForceCalculation", + "lammps.md": "aiida_lammps.calculations.lammps.md:MdCalculation", + "lammps.md.multi": "aiida_lammps.calculations.lammps.md_multi:MdMultiCalculation", + "lammps.optimize": "aiida_lammps.calculations.lammps.optimize:OptimizeCalculation", + "dynaphopy": "aiida_lammps.calculations.dynaphopy: DynaphopyCalculation" + }, + "aiida.parsers": { + "lammps.force": "aiida_lammps.parsers.lammps.force:ForceParser", + "lammps.md": "aiida_lammps.parsers.lammps.md:MdParser", + "lammps.md.multi": "aiida_lammps.parsers.lammps.md_multi:MdMultiParser", + "lammps.optimize": "aiida_lammps.parsers.lammps.optimize:OptimizeParser", + "dynaphopy": "aiida_lammps.parsers.dynaphopy: DynaphopyParser" + }, + "aiida.data": { + "lammps.potential": "aiida_lammps.data.potential:EmpiricalPotential", + "lammps.trajectory": "aiida_lammps.data.trajectory:LammpsTrajectory" + }, + "lammps.potentials": { + "eam": "aiida_lammps.data.pot_plugins.eam:EAM", + "lennard_jones": "aiida_lammps.data.pot_plugins.lennard_jones:LennardJones", + "reaxff": "aiida_lammps.data.pot_plugins.reaxff:Reaxff", + "tersoff": "aiida_lammps.data.pot_plugins.tersoff:Tersoff" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 6 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 5 + }, + { + "colorclass": "red", + "text": "Data", + "count": 2 + }, + { + "colorclass": "orange", + "text": "Other (Lammps potentials)", + "count": 4 + } + ], + "pip_install_cmd": "pip install git+https://github.com/aiidaplugins/aiida-lammps", + "is_installable": "True" + }, + "aiida-lsmo": { + "code_home": "https://github.com/lsmo-epfl/aiida-lsmo", + "development_status": "stable", + "entry_point_prefix": "lsmo", + "pip_url": "git+https://github.com/lsmo-epfl/aiida-lsmo", + "name": "aiida-lsmo", + "package_name": "aiida_lsmo", + "hosted_on": "github.com", + "metadata": { + "author": "Aliaksandr Yakutovich, Daniele Ongari, Leopold Talirz", + "author_email": "aliaksandr.yakutovich@epfl.ch", + "version": "1.0.0", + "description": "AiiDA workflows for the LSMO laboratory at EPFL", + "classifiers": [ + "Programming Language :: Python :: 3.6", + "Programming Language 
:: Python :: 3.7" + ] + }, + "aiida_version": ">=1.0.0", + "entry_points": { + "aiida.calculations": { + "lsmo.ff_builder": { + "description": [ + "AiiDA calcfunction to assemble force filed parameters into SinglefileData for Raspa." + ], + "spec": { + "inputs": [ + { + "name": "params", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "cif_molecule", + "required": false, + "valid_types": "Data, NoneType", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_lsmo.calcfunctions:ff_builder" + }, + "lsmo.calc_ch4_working_cap": { + "description": [ + "Compute the CH4 working capacity from the output_parameters Dict of IsothermWorkChain.", + " This must have run calculations at 5.8 and 65.0 bar (at 298K), which are the standard reference for the evaluation.", + "", + " The results can be compared with Simon2015 (10.1039/C4EE03515A)." + ], + "spec": { + "inputs": [ + { + "name": "isot_dict", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_lsmo.calcfunctions:calc_ch4_working_cap" + }, + "lsmo.calc_h2_working_cap": { + "description": [ + "Compute the H2 working capacity from the output_parameters Dict of MultiTempIsothermWorkChain.", + " This must have run calculations at 1, 5 and 100 bar at 77, 198, 298 K.", + " The US DOE Target for the Onboard Storage of Hydrogen Vehicles set the bar to 4.5 wt% and 30 g/L (Kapelewski2018).", + " Case-A: near-ambient-T adsorption, 100bar/198K to 5bar/298K (cf. Kapelewski2018, 10.1021/acs.chemmater.8b03276)", + " ....... Ni2(m-dobdc), experimental: 23.0 g/L", + " Case-B: low T adsorption, 100-5bar at 77K (cf. Ahmed2019, 10.1038/s41467-019-09365-w)", + " ....... NU-100, best experimental: 35.5 g/L", + " Case-C: low T adsorption at low discharge, 100-1bar at 77K (cf. Thornton2017, 10.1021/acs.chemmater.6b04933)", + " ....... hypMOF-5059389, best simulated: 40.0 g/L" + ], + "spec": { + "inputs": [ + { + "name": "isotmt_dict", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." 
+ } + ] + }, + "class": "aiida_lsmo.calcfunctions:calc_h2_working_cap" + }, + "lsmo.calc_o2_working_cap": { + "description": [ + "Compute the O2 working capacity from the output_parameters Dict of IsothermWorkChain.", + " This must have run calculations at 5 and 140.0 bar (at 298K), to be consistent with the screening of Moghadam2018", + " (10.1038/s41467-018-03892-8), for which the MOF ANUGIA (UMCM-152) was found to have a volumetric working capacity", + " of 249 vSTP/v (simulations are nearly identical to experiments).", + " Consider that, at the same conditions, an empty thank can only store 136 vSTP/v, and a comparable working capacity", + " can only br obtained compressing till 300bar." + ], + "spec": { + "inputs": [ + { + "name": "isot_dict", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_lsmo.calcfunctions:calc_o2_working_cap" + }, + "lsmo.calc_selectivity": { + "description": [ + "Compute the selectivity of gas A on gas B as S = kH_a/kH_b.", + " Note that if the material is not porous to one of the materials, the result is simply {'is_porous': False}.", + " To maintain the comptaibility with v1, intead of checking 'is_porous', it checks for the henry_coefficient_average", + " key in the Dict." + ], + "spec": { + "inputs": [ + { + "name": "isot_dict_a", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "isot_dict_b", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_lsmo.calcfunctions:calc_selectivity" + } + }, + "aiida.parsers": { + "lsmo.cp2k_bsse_parser": "aiida_lsmo.parsers:Cp2kBsseParser", + "lsmo.cp2k_advanced_parser": "aiida_lsmo.parsers:Cp2kAdvancedParser" + }, + "aiida.workflows": { + "lsmo.binding_site": { + "description": [ + "A workchain that combines SimAnnealing & Cp2kBindingEnergy" + ], + "spec": { + "inputs": [ + { + "name": "cp2k_base", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "molecule", + "required": true, + "valid_types": "Str, Dict", + "info": "Adsorbate molecule: settings to be read from the yaml.Advanced: input a Dict for non-standard settings." + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "Parameters for the SimAnnealing workchain: will be merged with default ones." + }, + { + "name": "raspa_base", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "CifData", + "info": "Adsorbent framework CIF." 
+ }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "protocol_modify", + "required": false, + "valid_types": "Dict", + "info": "Specify custom settings that overvrite the yaml settings" + }, + { + "name": "protocol_tag", + "required": false, + "valid_types": "Str", + "info": "The tag of the protocol tag.yaml. NOTE: only the settings are read, stage is set to GEO_OPT." + }, + { + "name": "protocol_yaml", + "required": false, + "valid_types": "SinglefileData", + "info": "Specify a custom yaml file. NOTE: only the settings are read, stage is set to GEO_OPT." + }, + { + "name": "starting_settings_idx", + "required": false, + "valid_types": "Int", + "info": "If idx>0 is chosen, jumps directly to overwrite settings_0 with settings_{idx}" + } + ], + "outputs": [ + { + "name": "dft", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "ff", + "required": true, + "valid_types": "", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_lsmo.workchains:BindingSiteWorkChain" + }, + "lsmo.cp2k_binding_energy": { + "description": [ + "Submits Cp2kBase work chain for structure + molecule system, first optimizing the geometry of the molecule and", + " later computing the BSSE corrected interaction energy.", + " This work chain is inspired to Cp2kMultistage, and shares some logics and data from it." + ], + "spec": { + "inputs": [ + { + "name": "cp2k_base", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "molecule", + "required": true, + "valid_types": "StructureData", + "info": "Input molecule in the unit cell of the structure." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "Input structure that contains the molecule." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "protocol_modify", + "required": false, + "valid_types": "Dict", + "info": "Specify custom settings that overvrite the yaml settings" + }, + { + "name": "protocol_tag", + "required": false, + "valid_types": "Str", + "info": "The tag of the protocol tag.yaml. NOTE: only the settings are read, stage is set to GEO_OPT." + }, + { + "name": "protocol_yaml", + "required": false, + "valid_types": "SinglefileData", + "info": "Specify a custom yaml file. NOTE: only the settings are read, stage is set to GEO_OPT." + }, + { + "name": "starting_settings_idx", + "required": false, + "valid_types": "Int", + "info": "If idx>0 is chosen, jumps directly to overwrite settings_0 with settings_{idx}" + } + ], + "outputs": [ + { + "name": "loaded_molecule", + "required": true, + "valid_types": "StructureData", + "info": "Molecule geometry in the unit cell." + }, + { + "name": "loaded_structure", + "required": true, + "valid_types": "StructureData", + "info": "Geometry of the system with both fragments." + }, + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "Info regarding the binding energy of the system." 
+ }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 901, + "message": "Specified starting_settings_idx that is not existing, or any in between 0 and idx is missing" + }, + { + "status": 902, + "message": "Settings for Stage0 are not ok but there are no more robust settings to try" + }, + { + "status": 903, + "message": "Something important was not printed correctly and the parsing of the first calculation failed" + } + ] + }, + "class": "aiida_lsmo.workchains.cp2k_binding_energy:Cp2kBindingEnergyWorkChain" + }, + "lsmo.cp2k_multistage": { + "description": [ + "Submits Cp2kBase workchains for ENERGY, GEO_OPT, CELL_OPT and MD jobs iteratively", + " The protocol_yaml file contains a series of settings_x and stage_x:", + " the workchains starts running the settings_0/stage_0 calculation, and, in case of a failure, changes the settings", + " untill the SCF of stage_0 converges. Then it uses the same settings to run the next stages (i.e., stage_1, etc.)." + ], + "spec": { + "inputs": [ + { + "name": "cp2k_base", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "min_cell_size", + "required": false, + "valid_types": "Float", + "info": "To avoid using k-points, extend the cell so that min(perp_width)>min_cell_size" + }, + { + "name": "parent_calc_folder", + "required": false, + "valid_types": "RemoteData", + "info": "Provide an initial parent folder that contains the wavefunction for restart" + }, + { + "name": "protocol_modify", + "required": false, + "valid_types": "Dict", + "info": "Specify custom settings that overvrite the yaml settings" + }, + { + "name": "protocol_tag", + "required": false, + "valid_types": "Str", + "info": "The tag of the protocol to be read from {tag}.yaml unless protocol_yaml input is specified" + }, + { + "name": "protocol_yaml", + "required": false, + "valid_types": "SinglefileData", + "info": "Specify a custom yaml file with the multistage settings (and ignore protocol_tag)" + }, + { + "name": "starting_settings_idx", + "required": false, + "valid_types": "Int", + "info": "If idx>0 is chosen, jumps directly to overwrite settings_0 with settings_{idx}" + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "Input structure" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." 
+ }, + { + "name": "last_input_parameters", + "required": false, + "valid_types": "Dict", + "info": "CP2K input parameters used (and possibly working) used in the last stage" + }, + { + "name": "output_parameters", + "required": false, + "valid_types": "Dict", + "info": "Output CP2K parameters of all the stages, merged together" + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "Processed structure (missing if only ENERGY calculation is performed)" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 901, + "message": "Specified starting_settings_idx that is not existing, or any in between 0 and idx is missing" + }, + { + "status": 902, + "message": "Settings for Stage0 are not ok but there are no more robust settings to try" + }, + { + "status": 903, + "message": "Something important was not printed correctly and the parsing of the first calculation failed" + } + ] + }, + "class": "aiida_lsmo.workchains:Cp2kMultistageWorkChain" + }, + "lsmo.cp2k_multistage_ddec": { + "description": [ + "A workchain that combines: Cp2kMultistageWorkChain + Cp2kDdecWorkChain" + ], + "spec": { + "inputs": [ + { + "name": "cp2k_base", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "ddec", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "min_cell_size", + "required": false, + "valid_types": "Float", + "info": "To avoid using k-points, extend the cell so that min(perp_width)>min_cell_size" + }, + { + "name": "parent_calc_folder", + "required": false, + "valid_types": "RemoteData", + "info": "Provide an initial parent folder that contains the wavefunction for restart" + }, + { + "name": "protocol_modify", + "required": false, + "valid_types": "Dict", + "info": "Specify custom settings that overvrite the yaml settings" + }, + { + "name": "protocol_tag", + "required": false, + "valid_types": "Str", + "info": "The tag of the protocol to be read from {tag}.yaml unless protocol_yaml input is specified" + }, + { + "name": "protocol_yaml", + "required": false, + "valid_types": "SinglefileData", + "info": "Specify a custom yaml file with the multistage settings (and ignore protocol_tag)" + }, + { + "name": "starting_settings_idx", + "required": false, + "valid_types": "Int", + "info": "If idx>0 is chosen, jumps directly to overwrite settings_0 with settings_{idx}" + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "Input structure" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." 
+ }, + { + "name": "structure_ddec", + "required": true, + "valid_types": "CifData", + "info": "structure with DDEC charges" + }, + { + "name": "last_input_parameters", + "required": false, + "valid_types": "Dict", + "info": "CP2K input parameters used (and possibly working) used in the last stage" + }, + { + "name": "output_parameters", + "required": false, + "valid_types": "Dict", + "info": "Output CP2K parameters of all the stages, merged together" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_lsmo.workchains:Cp2kMultistageDdecWorkChain" + }, + "lsmo.isotherm": { + "description": [ + "Workchain that computes volpo and blocking spheres: if accessible volpo>0", + " it also runs a raspa widom calculation for the Henry coefficient." + ], + "spec": { + "inputs": [ + { + "name": "molecule", + "required": true, + "valid_types": "Str, Dict", + "info": "Adsorbate molecule: settings to be read from the yaml.Advanced: input a Dict for non-standard settings." + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "Parameters for the Isotherm workchain (see workchain.schema for default values)." + }, + { + "name": "raspa_base", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "CifData", + "info": "Adsorbent framework CIF." + }, + { + "name": "zeopp", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "geometric", + "required": false, + "valid_types": "Dict", + "info": "[Only used by IsothermMultiTempWorkChain] Already computed geometric properties" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "Results of the single temperature wc: keys can vay depending on is_porous and is_kh_enough booleans." + }, + { + "name": "block", + "required": false, + "valid_types": "SinglefileData", + "info": "Blocked pockets fileoutput file." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_lsmo.workchains:IsothermWorkChain" + }, + "lsmo.isotherm_multi_temp": { + "description": [ + "Run IsothermWorkChain for multiple temperatures: first compute geometric properties", + " and then submit Widom+GCMC at different temperatures in parallel" + ], + "spec": { + "inputs": [ + { + "name": "molecule", + "required": true, + "valid_types": "Str, Dict", + "info": "Adsorbate molecule: settings to be read from the yaml.Advanced: input a Dict for non-standard settings." + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "Parameters for the Isotherm workchain (see workchain.schema for default values)." 
+ }, + { + "name": "raspa_base", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "CifData", + "info": "Adsorbent framework CIF." + }, + { + "name": "zeopp", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "geometric", + "required": false, + "valid_types": "Dict", + "info": "[Only used by IsothermMultiTempWorkChain] Already computed geometric properties" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "Results of isotherms run at different temperatures." + }, + { + "name": "block", + "required": false, + "valid_types": "SinglefileData", + "info": "Blocked pockets fileoutput file." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_lsmo.workchains:IsothermMultiTempWorkChain" + }, + "lsmo.isotherm_calc_pe": { + "description": [ + "Compute CO2 parassitic energy (PE) after running IsothermWorkChain for CO2 and N2 at 300K." + ], + "spec": { + "inputs": [ + { + "name": "raspa_base", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "CifData", + "info": "Adsorbent framework CIF." + }, + { + "name": "zeopp", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "geometric", + "required": false, + "valid_types": "Dict", + "info": "[Only used by IsothermMultiTempWorkChain] Already computed geometric properties" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict", + "info": "Parameters for Isotherm work chain" + }, + { + "name": "pe_parameters", + "required": false, + "valid_types": "Dict", + "info": "Parameters for PE process modelling" + } + ], + "outputs": [ + { + "name": "co2", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "n2", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "Output parmaters of a calc_PE calculations" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." 
+ } + ] + }, + "class": "aiida_lsmo.workchains:IsothermCalcPEWorkChain" + }, + "lsmo.zeopp_multistage_ddec": { + "description": [ + "A workchain that combines: Zeopp + Cp2kMultistageWorkChain + Cp2kDdecWorkChain + Zeopp" + ], + "spec": { + "inputs": [ + { + "name": "cp2k_base", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "ddec", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "CifData", + "info": "input structure" + }, + { + "name": "zeopp", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "min_cell_size", + "required": false, + "valid_types": "Float", + "info": "To avoid using k-points, extend the cell so that min(perp_width)>min_cell_size" + }, + { + "name": "parent_calc_folder", + "required": false, + "valid_types": "RemoteData", + "info": "Provide an initial parent folder that contains the wavefunction for restart" + }, + { + "name": "protocol_modify", + "required": false, + "valid_types": "Dict", + "info": "Specify custom settings that overvrite the yaml settings" + }, + { + "name": "protocol_tag", + "required": false, + "valid_types": "Str", + "info": "The tag of the protocol to be read from {tag}.yaml unless protocol_yaml input is specified" + }, + { + "name": "protocol_yaml", + "required": false, + "valid_types": "SinglefileData", + "info": "Specify a custom yaml file with the multistage settings (and ignore protocol_tag)" + }, + { + "name": "starting_settings_idx", + "required": false, + "valid_types": "Int", + "info": "If idx>0 is chosen, jumps directly to overwrite settings_0 with settings_{idx}" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "structure_ddec", + "required": true, + "valid_types": "CifData", + "info": "structure with DDEC charges" + }, + { + "name": "zeopp_after_opt", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "zeopp_before_opt", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "last_input_parameters", + "required": false, + "valid_types": "Dict", + "info": "CP2K input parameters used (and possibly working) used in the last stage" + }, + { + "name": "output_parameters", + "required": false, + "valid_types": "Dict", + "info": "Output CP2K parameters of all the stages, merged together" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_lsmo.workchains:ZeoppMultistageDdecWorkChain" + }, + "lsmo.sim_annealing": { + "description": [ + "A work chain to compute the minimum energy geometry of a molecule inside a framework, using simulated annealing,", + " i.e., decreasing the temperature of a Monte Carlo simulation and finally running and energy minimization step." + ], + "spec": { + "inputs": [ + { + "name": "molecule", + "required": true, + "valid_types": "Str, Dict", + "info": "Adsorbate molecule: settings to be read from the yaml.Advanced: input a Dict for non-standard settings." 
+ }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "Parameters for the SimAnnealing workchain: will be merged with default ones." + }, + { + "name": "raspa_base", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "CifData", + "info": "Adsorbent framework CIF." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "loaded_molecule", + "required": true, + "valid_types": "CifData", + "info": "CIF containing the final postition of the molecule." + }, + { + "name": "loaded_structure", + "required": true, + "valid_types": "CifData", + "info": "CIF containing the loaded structure." + }, + { + "name": "output_parameters", + "required": false, + "valid_types": "Dict", + "info": "Information about the final configuration." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_lsmo.workchains.sim_annealing:SimAnnealingWorkChain" + }, + "lsmo.nanoporous_screening_1": { + "description": [ + "A workchain that combines: ZeoppMultistageDdecWorkChain wc1 and IsothermCalcPEWorkChain wc2.", + " In future I will use this to include more applications to run in parallel." + ], + "spec": { + "inputs": [ + { + "name": "cp2k_base", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "ddec", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "raspa_base", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "CifData", + "info": "input structure" + }, + { + "name": "zeopp", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "geometric", + "required": false, + "valid_types": "Dict", + "info": "[Only used by IsothermMultiTempWorkChain] Already computed geometric properties" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "min_cell_size", + "required": false, + "valid_types": "Float", + "info": "To avoid using k-points, extend the cell so that min(perp_width)>min_cell_size" + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict", + "info": "Parameters for Isotherm work chain" + }, + { + "name": "parent_calc_folder", + "required": false, + "valid_types": "RemoteData", + "info": "Provide an initial parent folder that contains the wavefunction for restart" + }, + { + "name": "pe_parameters", + "required": false, + "valid_types": "Dict", + "info": "Parameters for PE process modelling" + }, + { + "name": "protocol_modify", + "required": false, + "valid_types": "Dict", + "info": "Specify custom settings that overvrite the yaml settings" + }, + { + "name": "protocol_tag", + "required": false, + "valid_types": "Str", + "info": "The tag of the protocol to be read from {tag}.yaml unless protocol_yaml input is specified" + }, + { + "name": "protocol_yaml", + "required": false, + "valid_types": "SinglefileData", + "info": "Specify a custom yaml file with the multistage settings (and ignore protocol_tag)" + }, + { + "name": "starting_settings_idx", + "required": false, + "valid_types": "Int", + "info": "If idx>0 
is chosen, jumps directly to overwrite settings_0 with settings_{idx}" + } + ], + "outputs": [], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_lsmo.workchains:NanoporousScreening1WorkChain" + } + } + }, + "commits_count": 17, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 5 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 2 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 10 + } + ], + "pip_install_cmd": "pip install git+https://github.com/lsmo-epfl/aiida-lsmo", + "is_installable": "True" + }, + "aiida-metavo-scheduler": { + "code_home": "https://github.com/pzarabadip/aiida-metavo-scheduler", + "development_status": "stable", + "entry_point_prefix": "metavo_scheduler", + "pip_url": "git+https://github.com/pzarabadip/aiida-metavo-scheduler", + "name": "aiida-metavo-scheduler", + "package_name": "aiida_metavo_scheduler", + "hosted_on": "github.com", + "metadata": { + "author": "Pezhman Zarabadi-Poor", + "author_email": "pzarabadip@gmail.com", + "version": "1.0.0", + "description": "", + "classifiers": [ + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Topic :: Scientific/Engineering" + ] + }, + "aiida_version": ">=1.0.0,<1.6", + "entry_points": { + "aiida.cmdline.computer.configure": { + "sshmetavo": "aiida_metavo_scheduler.metavo.ssh_metavo:CONFIGURE_SSH_CMD" + }, + "aiida.schedulers": { + "pbsprometavo": "aiida_metavo_scheduler.metavo.pbspro_metavo:PbsproSchedulerMetaVO" + }, + "aiida.transports": { + "sshmetavo": "aiida_metavo_scheduler.metavo.ssh_metavo:SshTransport" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "orange", + "text": "Other (Cmdline computer configure, Schedulers, Transports)", + "count": 3 + } + ], + "pip_install_cmd": "pip install git+https://github.com/pzarabadip/aiida-metavo-scheduler", + "is_installable": "False" + }, + "aiida-mpds": { + "code_home": "https://github.com/mpds-io/mpds-aiida", + "development_status": "beta", + "documentation_url": "https://github.com/mpds-io/mpds-aiida", + "entry_point_prefix": "mpds", + "pip_url": "git+https://github.com/mpds-io/mpds-aiida", + "name": "aiida-mpds", + "package_name": "aiida_mpds", + "hosted_on": "github.com", + "metadata": { + "author": "Andrey Sobolev", + "author_email": "as@tilde.pro", + "version": "", + "description": "Aiida workflows for MPDS based on CRYSTAL", + "classifiers": [ + "Programming Language :: Python", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering", + "Topic :: Scientific/Engineering :: Chemistry", + "Topic :: Scientific/Engineering :: Physics", + "Topic :: Scientific/Engineering :: Information Analysis", + "Framework :: AiiDA" + ] + }, + "aiida_version": ">=1.0.1", + "entry_points": { + "aiida.workflows": { + "crystal.mpds": 
"mpds_aiida.workflows.mpds:MPDSStructureWorkchain", + "crystal.cif": "mpds_aiida.workflows.cif:CIFStructureWorkchain", + "crystal.aiida": "mpds_aiida.workflows.aiida:AiidaStructureWorkchain" + } + }, + "commits_count": 9, + "summaryinfo": [ + { + "colorclass": "green", + "text": "Workflows", + "count": 3 + } + ], + "pip_install_cmd": "pip install git+https://github.com/mpds-io/mpds-aiida", + "is_installable": "False" + }, + "aiida-muon": { + "entry_point_prefix": "muon", + "code_home": "https://github.com/positivemuon/aiida-muon", + "version_file": "https://github.com/positivemuon/aiida-muon/blob/main/aiida_muon/__init__.py", + "pip_url": "git+https://github.com/positivemuon/aiida-muon", + "name": "aiida-muon", + "package_name": "aiida_muon", + "hosted_on": "github.com", + "metadata": { + "description": "aiida-muon is allows to find candiate muon implantation sites and hyperfine field by DFT supercell relaxations and from further symmetry and kinetics analysis. ", + "classifiers": [ + "Programming Language :: Python", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Development Status :: 2 - Pre-Alpha", + "Framework :: AiiDA" + ], + "author": "Muon group Parma" + }, + "aiida_version": ">=2.0,<3", + "entry_points": { + "aiida.workflows": { + "muon.find_muon": { + "description": [ + "FindMuonWorkChain finds the candidate implantation site for a positive muon.", + " It first performs DFT relaxation calculations for a set of initial muon sites.", + " It then analyzes the results of these calculations and finds candidate muon sites.", + " If there are magnetic inequivalent sites not initially, they are recalculated", + " It further calculates the muon contact hyperfine field at these candidate sites." + ], + "spec": { + "inputs": [ + { + "name": "sc_matrix", + "required": true, + "valid_types": "List", + "info": " List of length 1 for supercell size " + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "Input initial structure" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "mu_spacing", + "required": false, + "valid_types": "Float, NoneType", + "info": "Minimum distance in Angstrom between two starting muon positions generated on a grid." + }, + { + "name": "qe", + "required": false, + "valid_types": "", + "info": "Input parameters, settings and options for QE DFT calculations" + } + ], + "outputs": [ + { + "name": "all_index_uuid", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "all_sites", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "unique_sites", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "unique_sites_dipolar", + "required": false, + "valid_types": "List", + "info": "" + }, + { + "name": "unique_sites_hyperfine", + "required": false, + "valid_types": "Dict", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." 
+ }, + { + "status": 405, + "message": "One of the PwRelaxWorkChain subprocesses failed" + }, + { + "status": 406, + "message": "One of the PwBaseWorkChain subprocesses failed" + }, + { + "status": 407, + "message": "One of the PPWorkChain subprocesses failed" + } + ] + }, + "class": "aiida_muon.workflows.find_muon:FindMuonWorkChain" + } + } + }, + "commits_count": 21, + "development_status": "pre-alpha", + "summaryinfo": [ + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/positivemuon/aiida-muon", + "is_installable": "True" + }, + "aiida-musconv": { + "entry_point_prefix": "musconv", + "code_home": "https://github.com/positivemuon/aiida-musconv", + "version_file": "https://github.com/positivemuon/aiida-musconv/blob/main/aiida_musconv/__init__.py", + "pip_url": "git+https://github.com/positivemuon/aiida-musconv", + "name": "aiida-musconv", + "package_name": "aiida_musconv", + "hosted_on": "github.com", + "metadata": { + "description": "aiida-musconv is a plugin that allows to obtain converged supercell size for an interstitial impurity calculation.", + "classifiers": [ + "Programming Language :: Python", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Development Status :: 2 - Pre-Alpha", + "Framework :: AiiDA" + ], + "author": "Muon group Parma" + }, + "aiida_version": ">=2.0,<3", + "entry_points": { + "aiida.workflows": { + "musconv": { + "description": [ + "WorkChain for finding converged supercell for interstitial impurity calculation" + ], + "spec": { + "inputs": [ + { + "name": "pwscf", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "Input initial structure" + }, + { + "name": "kpoints_distance", + "required": false, + "valid_types": "Float, NoneType", + "info": "The minimum desired distance in 1/\u00c5 between k-points in reciprocal space." + }, + { + "name": "max_iter_num", + "required": false, + "valid_types": "Int, NoneType", + "info": "Maximum number of iteration in the supercell convergence loop" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "min_length", + "required": false, + "valid_types": "Float, NoneType", + "info": "The minimum length of the smallest lattice vector for the first generated supercell " + }, + { + "name": "pseudofamily", + "required": false, + "valid_types": "Str, NoneType", + "info": "The label of the pseudo family" + } + ], + "outputs": [ + { + "name": "Converged_SCmatrix", + "required": true, + "valid_types": "ArrayData", + "info": "" + }, + { + "name": "Converged_supercell", + "required": true, + "valid_types": "StructureData", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." 
+ }, + { + "status": 402, + "message": "one of the PwCalculation subprocesses failed" + }, + { + "status": 702, + "message": "Max number of supercell convergence reached " + }, + { + "status": 704, + "message": "Error in fitting the forces to an exponential" + } + ] + }, + "class": "aiida_musconv.workflows.musconv:MusconvWorkChain" + } + } + }, + "commits_count": 43, + "development_status": "pre-alpha", + "summaryinfo": [ + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/positivemuon/aiida-musconv", + "is_installable": "True" + }, + "aiida-nanotech-empa": { + "code_home": "https://github.com/nanotech-empa/aiida-nanotech-empa", + "development_status": "beta", + "entry_point_prefix": "nanotech_empa", + "pip_url": "git+https://github.com/nanotech-empa/aiida-nanotech-empa", + "name": "aiida-nanotech-empa", + "package_name": "aiida_nanotech_empa", + "hosted_on": "github.com", + "metadata": {}, + "aiida_version": null, + "entry_points": {}, + "commits_count": 30, + "summaryinfo": [], + "pip_install_cmd": "pip install git+https://github.com/nanotech-empa/aiida-nanotech-empa", + "is_installable": "False" + }, + "aiida-nims-scheduler": { + "code_home": "https://github.com/atztogo/aiida-nims-scheduler", + "development_status": "stable", + "documentation_url": "https://github.com/atztogo/aiida-nims-scheduler", + "entry_point_prefix": "nims_scheduler", + "pip_url": "git+https://github.com/atztogo/aiida-nims-scheduler", + "name": "aiida-nims-scheduler", + "package_name": "aiida_nims_scheduler", + "hosted_on": "github.com", + "metadata": {}, + "aiida_version": null, + "entry_points": {}, + "commits_count": 21, + "summaryinfo": [], + "pip_install_cmd": "pip install git+https://github.com/atztogo/aiida-nims-scheduler", + "is_installable": "True" + }, + "aiida-nwchem": { + "code_home": "https://github.com/aiidateam/aiida-nwchem", + "documentation_url": "https://aiida-nwchem.readthedocs.io/", + "entry_point_prefix": "nwchem", + "pip_url": "aiida-nwchem", + "plugin_info": "https://raw.githubusercontent.com/aiidateam/aiida-nwchem/master/setup.json", + "name": "aiida-nwchem", + "package_name": "aiida_nwchem", + "hosted_on": "github.com", + "metadata": { + "description": "The official AiiDA plugin for NWChem", + "author_email": "The AiiDA team ", + "classifiers": [ + "Development Status :: 4 - Beta", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering" + ], + "version": "3.0.0" + }, + "aiida_version": ">=2.0,<3.0", + "entry_points": { + "aiida.calculations": { + "nwchem.base": { + "description": [ + "Base calculation class for NWChem." + ], + "spec": { + "inputs": [ + { + "name": "input_file", + "required": true, + "valid_types": "SinglefileData", + "info": "NWChem input file" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." 
+ }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "restart_folder", + "required": false, + "valid_types": "RemoteData, FolderData, NoneType", + "info": "Remote directory of a completed NWChem calculation to restart from." + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "The relaxed output structure." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Required output files are missing." + }, + { + "status": 301, + "message": "The retrieved temporary folder could not be accessed." + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 310, + "message": "The stdout output file could not be read." + }, + { + "status": 312, + "message": "The stdout output file was incomplete." 
+ }, + { + "status": 313, + "message": "The stdout contains multiple calculations" + }, + { + "status": 340, + "message": "The calculation stopped prematurely because it ran out of walltime but the job was killed by the scheduler before the files were safely written to disk for a potential restart." + }, + { + "status": 350, + "message": "The parser raised an unexpected exception." + } + ] + }, + "class": "aiida_nwchem.calculations.nwchem:NwchemBaseCalculation" + }, + "nwchem.nwchem": { + "description": [ + "Base calculation class for NWChem.", + "", + " Synthesizes NWChem input file from parameter dictionary and StructureData." + ], + "spec": { + "inputs": [ + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "Input parameters" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "The input structure, with or without a cell" + }, + { + "name": "add_cell", + "required": false, + "valid_types": "Bool", + "info": "The input structure, with or without a cell" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "restart_folder", + "required": false, + "valid_types": "RemoteData, FolderData, NoneType", + "info": "Remote directory of a completed NWChem calculation to restart from." + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "The relaxed output structure." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." 
+ }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Required output files are missing." + }, + { + "status": 301, + "message": "The retrieved temporary folder could not be accessed." + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 310, + "message": "The stdout output file could not be read." + }, + { + "status": 312, + "message": "The stdout output file was incomplete." + }, + { + "status": 313, + "message": "The stdout contains multiple calculations" + }, + { + "status": 340, + "message": "The calculation stopped prematurely because it ran out of walltime but the job was killed by the scheduler before the files were safely written to disk for a potential restart." + }, + { + "status": 350, + "message": "The parser raised an unexpected exception." + } + ] + }, + "class": "aiida_nwchem.calculations.nwchem:NwchemCalculation" + } + }, + "aiida.parsers": { + "nwchem.nwchem": "aiida_nwchem.parsers.nwchem:NwchemBaseParser" + }, + "aiida.workflows": { + "nwchem.base": { + "description": [ + "Workchain to run an NWChem calculation with automated error handling and restarts." + ], + "spec": { + "inputs": [ + { + "name": "nwchem", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "The relaxed output structure." 
+ }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." + } + ] + }, + "class": "aiida_nwchem.workflows.base:NwchemBaseWorkChain" + } + } + }, + "commits_count": 20, + "development_status": "beta", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 2 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-nwchem", + "is_installable": "True" + }, + "aiida-open_circuit_voltage": { + "entry_point_prefix": "quantumespresso.ocv", + "code_home": "https://github.com/tsthakur/aiida-open_circuit_voltage", + "name": "aiida-open_circuit_voltage", + "package_name": "aiida_open_circuit_voltage", + "hosted_on": "github.com", + "metadata": { + "author": "Tushar Thakur", + "author_email": "tushar.thakur@epfl.ch", + "version": "0.1.0", + "description": "The AiiDA plugin to calculate ocv at various charge of states using QE", + "classifiers": [ + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python", + "Development Status :: 3 - Alpha", + "Natural Language :: English", + "Intended Audience :: Science/Research" + ] + }, + "aiida_version": ">=1.1.0,<2.0.0", + "entry_points": { + "aiida.workflows": { + "quantumespresso.ocv.ocvwc": "aiida_open_circuit_voltage.workflows.workchain:OCVWorkChain" + } + }, + "commits_count": 27, + "development_status": "alpha", + "summaryinfo": [ + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "See source code repository." 
+ }, + "aiida-optimize": { + "code_home": "https://github.com/greschd/aiida-optimize", + "documentation_url": "https://aiida-optimize.readthedocs.io", + "entry_point_prefix": "optimize", + "pip_url": "aiida-optimize", + "plugin_info": "https://raw.githubusercontent.com/greschd/aiida-optimize/master/setup.json", + "name": "aiida-optimize", + "package_name": "aiida_optimize", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA Plugin for running optimization algorithms.", + "author": "Dominik Gresch", + "author_email": "greschd@gmx.ch", + "license": "Apache 2.0", + "home_page": "https://aiida-optimize.readthedocs.io/", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Environment :: Plugins", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering :: Physics" + ], + "version": "1.0.2" + }, + "aiida_version": ">=2.0.0,<3.0.0", + "entry_points": { + "aiida.workflows": { + "optimize.optimize": { + "description": [ + "Runs an optimization procedure, given an optimization engine that defines the optimization", + " algorithm, and a process which evaluates the function to be optimized." + ], + "spec": { + "inputs": [ + { + "name": "engine", + "required": true, + "valid_types": "Str", + "info": "Engine that runs the optimization." + }, + { + "name": "engine_kwargs", + "required": true, + "valid_types": "Dict", + "info": "Keyword arguments passed to the optimization engine." + }, + { + "name": "evaluate_process", + "required": true, + "valid_types": "Str", + "info": "Process which produces the result to be optimized." + }, + { + "name": "evaluate", + "required": false, + "valid_types": "", + "info": "Inputs that are passed to all evaluation processes." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "optimal_process_output", + "required": true, + "valid_types": "", + "info": "Output value of the optimal evaluation process." + }, + { + "name": "optimal_process_uuid", + "required": true, + "valid_types": "", + "info": "UUID of the optimal evaluation process." + }, + { + "name": "engine_outputs", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "optimal_process_input", + "required": false, + "valid_types": "", + "info": "Input value of the optimal evaluation process." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 201, + "message": "Optimization failed because one of the evaluate processes did not finish ok." + }, + { + "status": 202, + "message": "Optimization failed because the engine did not finish ok." + } + ] + }, + "class": "aiida_optimize._optimization_workchain:OptimizationWorkChain" + }, + "optimize.wrappers.add_inputs": { + "description": [ + "Wrapper workchain that takes inputs as keys and values and passes it", + " on to a sub-process. This enables taking a process which was not", + " designed to be used in optimization, and optimize with respect to", + " some arbitrary input. 
Inputs which always remain the same can be", + "    specified in the ``inputs`` namespace, whereas the inputs to be", + "    optimized are given through the ``added_input_keys`` and", + "    ``added_input_values`` inputs.", + "", + "    The outputs of the wrapper workchain are the same as those of", + "    the wrapped process.", + "", + "    The \"added\" inputs can only be BaseType sub-classes, or", + "    attributes of a Dict. For each input, its port location is given", + "    in the \"added_input_keys\" input. For example, ``x.y`` would set", + "    the ``y`` input in the ``x`` namespace.", + "", + "    For cases where the input is a Dict attribute, the (possibly nested) attribute name is given after a colon. That means ``x:a.b`` would", + "    set the ``['a']['b']`` attribute of the ``Dict`` given in the ``x``", + "    input.", + "", + "    In cases where only a single input needs to be added, it can be", + "    specified directly instead of being wrapped in a List." + ], + "spec": { + "inputs": [ + { + "name": "added_input_keys", + "required": true, + "valid_types": "List, Str", + "info": "Specifies the location of each added input." + }, + { + "name": "added_input_values", + "required": true, + "valid_types": "List, BaseType", + "info": "Values of the added inputs to be passed into the sub-process." + }, + { + "name": "sub_process", + "required": true, + "valid_types": "Str", + "info": "The class of the process that should be wrapped." + }, + { + "name": "inputs", + "required": false, + "valid_types": "", + "info": "Inputs to be passed on to the sub-process." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 201, + "message": "Workchain failed because the sub-process did not finish ok." + } + ] + }, + "class": "aiida_optimize.wrappers._add_inputs:AddInputsWorkChain" + }, + "optimize.wrappers.concatenate": { + "description": [ + "Allows concatenating an arbitrary number of sub-processes.", + "", + "    A wrapper workchain that allows concatenating an arbitrary number", + "    of sub-processes. Outputs of one process can be configured to", + "    be passed to the next one." + ], + "spec": { + "inputs": [ + { + "name": "output_input_mappings", + "required": true, + "valid_types": "List", + "info": "Defines how inputs are passed between sub-processes. Each list entry has the form `((process_label_a, process_label_b), mapping)`, and defines outputs of process A to be passed to process B. The `mapping` values are dictionaries `{'output_name': 'input_name'}` giving the output name (in process A) and input name (in process B) for each value to pass." + }, + { + "name": "process_inputs", + "required": true, + "valid_types": "", + "info": "Inputs which are passed on to the sub-processes. The inputs should be grouped into a namespace identified by the process label." + }, + { + "name": "process_labels", + "required": true, + "valid_types": "List", + "info": "A list of pairs (label, process_name). The labels can be any string, the process_name needs to be loadable by `aiida_optimize.process_inputs.load_object`, and defines which process is being run."
+ }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "process_outputs", + "required": true, + "valid_types": "", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 200, + "message": "Workchain failed because a sub-process failed." + } + ] + }, + "class": "aiida_optimize.wrappers._concatenate:ConcatenateWorkChain" + }, + "optimize.wrappers.create_evaluate": { + "description": [ + "Wrapper workchain to combine two processes: The first process _creates_", + " a result, and the second _evaluates_ that result.", + "", + " The purpose of this workchain is to facilitate optimization of processes", + " which don't natively produce an output that can be optimized, by only", + " having to add the 'evaluation' part." + ], + "spec": { + "inputs": [ + { + "name": "create", + "required": true, + "valid_types": "", + "info": "Inputs which are passed on to the create sub-process." + }, + { + "name": "create_process", + "required": true, + "valid_types": "Str", + "info": "The sub-process which performs the create step." + }, + { + "name": "evaluate_process", + "required": true, + "valid_types": "Str", + "info": "The sub-process which performs the evaluate step." + }, + { + "name": "output_input_mapping", + "required": true, + "valid_types": "Dict", + "info": "A mapping from output names of the create process to input names of the evaluate process. These outputs (if present) are forwarded to the evaluate process." + }, + { + "name": "evaluate", + "required": false, + "valid_types": "", + "info": "Inputs which are passed on to the evaluate sub-process." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "create", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "evaluate", + "required": true, + "valid_types": "", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 201, + "message": "Workchain failed because the 'create' sub-process failed." + }, + { + "status": 202, + "message": "Workchain failed because the 'evaluate' sub-process failed." 
+ } + ] + }, + "class": "aiida_optimize.wrappers._create_evaluate:CreateEvaluateWorkChain" + } + } + }, + "commits_count": 2, + "development_status": "stable", + "summaryinfo": [ + { + "colorclass": "green", + "text": "Workflows", + "count": 4 + } + ], + "pip_install_cmd": "pip install aiida-optimize", + "is_installable": "True" + }, + "aiida-orca": { + "code_home": "https://github.com/pzarabadip/aiida-orca", + "development_status": "stable", + "documentation_url": "https://aiida-orca.readthedocs.io/", + "entry_point_prefix": "orca", + "pip_url": "git+https://github.com/pzarabadip/aiida-orca", + "name": "aiida-orca", + "package_name": "aiida_orca", + "hosted_on": "github.com", + "metadata": { + "author": "Pezhman Zarabadi-Poor", + "author_email": "pzarabadip@gmail.com", + "version": "0.5.1", + "description": "AiiDA plugin for ORCA code", + "classifiers": [ + "Environment :: Plugins", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Framework :: AiiDA" + ] + }, + "aiida_version": ">=1.0.0,<2.0.0", + "entry_points": { + "aiida.calculations": { + "orca_main": "aiida_orca.calculations:OrcaCalculation", + "orca_asa": "aiida_orca.calculations:OrcaAsaCalculation" + }, + "aiida.parsers": { + "orca_base_parser": "aiida_orca.parsers:OrcaBaseParser" + }, + "aiida.workflows": { + "orca.base": { + "description": [ + "Workchain to run a orca calculation with automated error handling and restarts." + ], + "spec": { + "inputs": [ + { + "name": "orca", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "the results of the calculation" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "relaxed_structure", + "required": false, + "valid_types": "StructureData", + "info": "relaxed structure" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." 
+ } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 300, + "message": "The calculation failed with an unidentified unrecoverable error." + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 301, + "message": "The calculation failed with an unrecoverable error coming from aiida-orca." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." + } + ] + }, + "class": "aiida_orca.workchains:OrcaBaseWorkChain" + } + } + }, + "commits_count": 42, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 2 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/pzarabadip/aiida-orca", + "is_installable": "True" + }, + "aiida-phonopy": { + "code_home": "https://github.com/aiida-phonopy/aiida-phonopy", + "documentation_url": "https://aiida-phonopy.readthedocs.io/", + "entry_point_prefix": "phonopy", + "pip_url": "aiida-phonopy", + "plugin_info": "https://raw.githubusercontent.com/aiida-phonopy/aiida-phonopy/master/setup.json", + "name": "aiida-phonopy", + "package_name": "aiida_phonopy", + "hosted_on": "github.com", + "metadata": { + "description": "The official AiiDA plugin for Phonopy", + "author_email": "Lorenzo Bastonero ", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering :: Chemistry", + "Topic :: Scientific/Engineering :: Physics" + ], + "version": "1.1.3" + }, + "aiida_version": ">=2.0.0,<3.0.0", + "entry_points": { + "aiida.calculations": { + "phonopy.phonopy": { + "description": [ + "Base `CalcJob` implementation for Phonopy post-processing." + ], + "spec": { + "inputs": [ + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "Phonopy parameters (`setting tags`) for post processing. The following tags, along their type, are allowed:\nPRIMITIVE_AXES\nPRIMITIVE_AXIS\nEIGENVECTORS\nBAND\nBAND_PATHS\nBAND_POINTS\nBAND_LABELS\nBAND_CONNECTION\nBAND_INDICES\nMESH\nMP\nMESH_NUMBERS\nMP_SHIFT\nGAMMA_CENTER\nWRITE_MESH\nDOS\nDOS_RANGE\nFMIN\nFMAX\nFPITCH\nPDOS\nPROJECTION_DIRECTION\nXYZ_DIRECTION\nSIGMA\nDEBYE_MODEL\nMOMEMT\nMOMENT_ORDER\nTPROP\nTMIN\nTMAX\nTSTEP\nPRETEND_REAL\nCUTOFF_FREQUENCY\nTDISP\nTDISPMAT\nTDISPMAT_CIF\nQPOINTS\nWRITEDM\nNAC_METHOD\nQ_DIRECTION\nGROUP_VELOCITY\nGV_DELTA_Q\nSYMMETRY_TOLERANCE\nSYMMETRY\nMESH_SYMMETRY\nFC_SYMMETRY\nFULL_FORCE_CONSTANTS\nWRITE_FORCE_CONSTANTS\nANIME_TYPE\nANIME\nMODULATION\nIRREPS\nSHOW_IRREPS\nLITTLE_COGROUP" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. 
This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "force_constants", + "required": false, + "valid_types": "ForceConstantsData, NoneType", + "info": "Force constants of the input structure." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "phonopy_data", + "required": false, + "valid_types": "PhonopyData, NoneType", + "info": "The preprocess output info of a previous ForceConstantsWorkChain." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Settings for phonopy calculation." + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "irreducible_representations", + "required": false, + "valid_types": "Dict", + "info": "Irreducible representation output." + }, + { + "name": "modulation", + "required": false, + "valid_types": "Dict", + "info": "Modulation information." + }, + { + "name": "output_force_constants", + "required": false, + "valid_types": "ArrayData", + "info": "Calculated force constants." + }, + { + "name": "output_parameters", + "required": false, + "valid_types": "Dict", + "info": "Sum up info of phonopy calculation." + }, + { + "name": "phonon_bands", + "required": false, + "valid_types": "BandsData", + "info": "Calculated phonon band structure." + }, + { + "name": "projected_phonon_dos", + "required": false, + "valid_types": "XyData", + "info": "Calculated projected DOS." + }, + { + "name": "qpoints", + "required": false, + "valid_types": "BandsData", + "info": "Calculated qpoints." + }, + { + "name": "qpoints_mesh", + "required": false, + "valid_types": "BandsData", + "info": "Calculated qpoint mesh." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "thermal_displacement_matrices", + "required": false, + "valid_types": "Dict", + "info": "Calculated thermal displacements matrices." + }, + { + "name": "thermal_displacements", + "required": false, + "valid_types": "Dict", + "info": "Calculated thermal displacements." 
+ }, + { + "name": "thermal_properties", + "required": false, + "valid_types": "XyData", + "info": "Calculated thermal properties." + }, + { + "name": "total_phonon_dos", + "required": false, + "valid_types": "XyData", + "info": "Calculated total DOS." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 301, + "message": "The retrieved temporary folder could not be accessed." + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 303, + "message": "The retrieved folder did not contain the required phonopy file." + }, + { + "status": 304, + "message": "The retrieved folder did not contain one or more expected output files." + }, + { + "status": 305, + "message": "No run mode has been selected." + }, + { + "status": 310, + "message": "The stdout output file could not be read." + }, + { + "status": 311, + "message": "The stdout output file could not be parsed." + }, + { + "status": 312, + "message": "The stdout output file was incomplete probably because the calculation got interrupted." + }, + { + "status": 320, + "message": "The loading of yaml file got an unexpected error." + }, + { + "status": 321, + "message": "The file loading via numpy got an unexpected error." + }, + { + "status": 350, + "message": "The parser raised an unexpected exception." + }, + { + "status": 400, + "message": "The parser was not able to parse one or more files." + } + ] + }, + "class": "aiida_phonopy.calculations.phonopy:PhonopyCalculation" + } + }, + "aiida.data": { + "phonopy.force_constants": "aiida_phonopy.data.force_constants:ForceConstantsData", + "phonopy.phonopy": "aiida_phonopy.data.phonopy:PhonopyData", + "phonopy.preprocess": "aiida_phonopy.data.preprocess:PreProcessData", + "phonopy.raw": "aiida_phonopy.data.raw:RawData" + }, + "aiida.parsers": { + "phonopy.phonopy": "aiida_phonopy.parsers.phonopy:PhonopyParser" + }, + "aiida.workflows": { + "phonopy.phonopy": { + "description": [ + "Abstract workflow for automated frozen phonons.", + "", + " Phonopy is used to produce structures with displacements,", + " while the forces are calculated with a quantum engine of choice.", + "", + " This workchain is meant to be used as a base for other specific force calculato plugin workchains,", + " or as an example on how to set a possible workchain/workflow. For this reason, the outline of", + " this class is not defined, while it provides the inputs and a `setup` method, which can be used", + " in a specific workflow outline. Ideally, the workflow would look like:", + "", + " 1. Setup the preprocess data.", + "", + " This is already provided in this class. 
It sets up a `PreProcessData` node, from where", + "        supercell, primitive cell and supercells with displacements can be easily extracted using", + "        the methods of the nodes. This node can be taken from `self.ctx.preprocess_data`, and used", + "        during the outline of the workflow.", + "", + "    2. Run supercells using the selected quantum engine/force calculator code.", + "", + "        In specific code implementations, a force calculation on supercells needs to be run.", + "        To get these supercells, one simply needs to run:", + "", + "        ```self.ctx.preprocess_data.calcfunctions.get_supercells_with_displacements()```", + "", + "        This will return a dictionary with all the supercells as StructureData to run for the phonon calculation.", + "        The keys of this dictionary are of the type `supercell_{number}`, where `number` is an integer.", + "        These numbers are essential since the `phonopy` force sets are generated following these numbers,", + "        in order to make sure to refer to the correct displacement. Thus, it is required to keep track", + "        of them.", + "        Moreover, a calculation over the pristine supercell structure should be run beforehand as a reference.", + "        This structure can instead be gotten via:", + "", + "        ```self.ctx.preprocess_data.calcfunctions.get_supercell()```", + "", + "        This will return a StructureData without any label.", + "", + "        For an example of implementation, refer to aiidateam/aiida-common-workflows.", + "", + "    * Note: some type of force calculation needs to map some variables from the unitcell to the supercell", + "        (and in certain cases even the primitive cell), e.g. the atomic spin in VASP. Since this is code dependent,", + "        you will need to map these parameters before launching the force calculation of a certain supercell", + "        with displacement. This information can be gotten via:", + "", + "        ```self.ctx.preprocess_data.get_cells_mappings()```", + "", + "        Moreover, consider that cells in phonopy will always (re)fold the atoms in order to have positive coordinates.", + "", + "    3. Inspect all runs and expose the forces and energies (not mandatory) outputs.", + "", + "        * Suggested: when the calculation on each supercell has finished (correctly),", + "        expose the output forces (and energies) in the dynamic `supercells_forces(energies)` namespace(s).", + "        Provide the forces of each supercell as an `ArrayData` with the forces stored as `forces`", + "        (e.g. if your code plugin stores the forces in `TrajectoryData`, extract them with a `calcfunction`).", + "        Expose each `ArrayData` choosing a **common prefix**, and as **suffix use", + "        _{number}**, with `{number}` referring to the corresponding supercell label suffix (which you are supposed to", + "        keep track of somewhere, e.g. in the label of the code calculation/workchain).", + "        Now you can gather all the information in one data node, i.e. in a `PhonopyData` node.", + "        To do so, you can simply run:", + "", + "        ```self.ctx.preprocess_data.calcfunctions.generate_phonopy_data(**self.outputs.supercells_forces)```", + "", + "        and then expose it as output in the `output_phonopy_data` namespace.", + "", + "        * Alternatively: instead of exposing the supercell forces as outputs, you can directly gather all the forces", + "        in a dictionary and pass this dictionary directly to the `generate_phonopy_data` method (always using", + "        the double *).", + "", + "        See the implementation in aiidateam/aiida-common-workflows for an example.", + "", + "    4. (optional) Run the non-analytical constants on the primitive cell.", + "", + "        Non-analytical constants should be run for polar insulators. These usually require a linear response code", + "        or a finite difference approach (e.g. using the electric enthalpy). Since this is usually the most expensive", + "        part, you should run them on the primitive cell. To get it, use:", + "", + "        ```self.ctx.preprocess_data.calcfunctions.get_primitive_cell()```", + "", + "        If you also compute these, collect the dielectric tensor and the effective Born charges in an ArrayData,", + "        with the array names `dielectric` and `born_charges` (in Cartesian coordinates!).", + "        Then, gather all the information of nac and forces in a unique `PhonopyData` via:", + "", + "        ```", + "        self.ctx.preprocess_data.calcfunctions.generate_phonopy_data(", + "            nac_parameters=nac_parameters,", + "            **self.outputs.supercells_forces", + "        )", + "        ```", + "", + "        and expose the output.", + "", + "    * Note: in the input for generating the full phonopy data, we require the nac to be given in the primitive cell.", + "        The primitive cell of phonopy will just rotate the lattice vectors, thus maintaining the Cartesian coordinate", + "        system. It can happen, though, that the unitcell is not the primitive cell of the system, meaning that the", + "        primitive cell will contain fewer atoms. We expect as input the nac computed on this number of atoms. If you", + "        want, for some reason, to compute the nac on the unitcell, you will need to get the reduced nac.", + "        To do so, you can consider using a built-in function in phonopy, namely:", + "", + "        :py:func:`phonopy.structure.symmetry.elaborate_borns_and_epsilon`" + ], + "spec": { + "inputs": [ + { + "name": "options", + "required": true, + "valid_types": "", + "info": "Options for how to run the workflow." + }, + { + "name": "displacement_generator", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Info for displacements generation. The following flags are allowed:\n distance\n is_plusminus\n is_diagonal\n is_trigonal\n number_of_snapshots\n random_seed\n temperature\n cutoff_frequency" + }, + { + "name": "fc_options", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Options for force constants calculation (optional). The following flags are allowed:\n calculate_full_force_constants\n fc_calculator\n fc_calculator_options" + }, + { + "name": "is_symmetry", + "required": false, + "valid_types": "Bool, NoneType", + "info": "Whether or not to use the space group symmetries." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "nac_parameters", + "required": false, + "valid_types": "ArrayData, NoneType", + "info": "Non-analytical parameters." + }, + { + "name": "preprocess_data", + "required": false, + "valid_types": "PhonopyData, PreProcessData, NoneType", + "info": "The preprocess data for frozen phonon calculation." + }, + { + "name": "primitive_matrix", + "required": false, + "valid_types": "List, NoneType", + "info": "The matrix used to generate the primitive cell from the input structure in the List format. Allowed shapes are 3x1 and 3x3 lists." + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData, NoneType", + "info": "The structure at equilibrium volume." + }, + { + "name": "supercell_matrix", + "required": false, + "valid_types": "List, NoneType", + "info": "The matrix used to generate the supercell from the input structure in the List format. Allowed shapes are 3x1 and 3x3 lists."
+ }, + { + "name": "symmetry_tolerance", + "required": false, + "valid_types": "Float, NoneType", + "info": "Symmetry tolerance for space group analysis on the input structure." + } + ], + "outputs": [ + { + "name": "output_phonopy_data", + "required": true, + "valid_types": "PhonopyData", + "info": "The phonopy data with supercells displacements, forces and (optionally)nac parameters to use in the post-processing calculation." + }, + { + "name": "supercells_forces", + "required": true, + "valid_types": "ArrayData", + "info": "The forces acting on the atoms of each supercell." + }, + { + "name": "output_force_constants", + "required": false, + "valid_types": "ForceConstantsData", + "info": "The matrix of force constants computed with finite displacements." + }, + { + "name": "supercells", + "required": false, + "valid_types": "StructureData", + "info": "The supercells with displacements." + }, + { + "name": "supercells_energies", + "required": false, + "valid_types": "Float", + "info": "The total energy of each supercell." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_phonopy.workflows.phonopy:PhonopyWorkChain" + } + } + }, + "commits_count": 66, + "development_status": "stable", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "red", + "text": "Data", + "count": 4 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-phonopy", + "is_installable": "True" + }, + "aiida-phtools": { + "code_home": "https://github.com/ltalirz/aiida-phtools", + "entry_point_prefix": "phtools", + "pip_url": "aiida-phtools", + "plugin_info": "https://raw.github.com/ltalirz/aiida-phtools/master/setup.json", + "name": "aiida-phtools", + "package_name": "aiida_phtools", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin for persistence homology tools, used to analyze nanoporous materials.", + "author": "Leopold Talirz", + "author_email": "leopold.talirz@gmail.com", + "license": "MIT", + "home_page": "https://github.com/ltalirz/aiida-phtools", + "classifiers": [ + "Programming Language :: Python" + ], + "version": "0.1.0a1" + }, + "aiida_version": "*", + "entry_points": { + "aiida.calculations": { + "phtools.dmatrix": "aiida_phtools.calculations.distance_matrix:DistanceMatrixCalculation", + "phtools.surface": "aiida_phtools.calculations.pore_surface:PoreSurfaceCalculation" + }, + "aiida.data": { + "phtools.surface": "aiida_phtools.data.pore_surface:PoreSurfaceParameters" + }, + "aiida.parsers": { + "phtools.dmatrix": "aiida_phtools.parsers.distance_matrix:DistanceMatrixParser", + "phtools.surface": "aiida_phtools.parsers.pore_surface:PoreSurfaceParser" + } + }, + "commits_count": 0, + "development_status": "planning", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 2 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 2 + }, + { + "colorclass": "red", + "text": "Data", + "count": 1 + } + ], + "pip_install_cmd": "pip install --pre aiida-phtools" + }, + "aiida-plumed": { + "code_home": 
"https://github.com/ConradJohnston/aiida-plumed", + "entry_point_prefix": "plumed", + "pip_url": "aiida-plumed", + "plugin_info": "https://raw.github.com/ConradJohnston/aiida-plumed/AiiDA-v1.0-compatibility/setup.json", + "name": "aiida-plumed", + "package_name": "aiida_plumed", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin providing support for Plumed2", + "author": "Conrad Johnston", + "author_email": "conrad.s.johnston@googlemail.com", + "license": "MIT", + "home_page": "https://github.com/ConradJohnston/aiida-plumed", + "classifiers": [ + "Development Status :: 2 - Pre-Alpha", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python" + ], + "version": "0.1.0a0" + }, + "aiida_version": ">=1.0.0b3,<2.0.0", + "entry_points": { + "aiida.calculations": { + "plumed": "aiida_plumed.calculations:DiffCalculation" + }, + "aiida.cmdline.data": { + "plumed": "aiida_plumed.cli:data_cli" + }, + "aiida.data": { + "plumed": "aiida_plumed.data:DiffParameters" + }, + "aiida.parsers": { + "plumed": "aiida_plumed.parsers:DiffParser" + } + }, + "commits_count": 0, + "development_status": "pre-alpha", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "red", + "text": "Data", + "count": 1 + }, + { + "colorclass": "orange", + "text": "Other (Data commands)", + "count": 1 + } + ], + "pip_install_cmd": "pip install --pre aiida-plumed", + "is_installable": "True" + }, + "aiida-porousmaterials": { + "code_home": "https://github.com/pzarabadip/aiida-porousmaterials", + "development_status": "stable", + "entry_point_prefix": "porousmaterials", + "pip_url": "aiida-porousmaterials", + "name": "aiida-porousmaterials", + "package_name": "aiida_porousmaterials", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin for PorousMaterials code", + "author": "Pezhman Zarabadi-Poor", + "author_email": "pzarabadip@gmail.com", + "license": "MIT", + "home_page": "https://github.com/pzarabadip/aiida-porousmaterials", + "classifiers": [ + "Environment :: Plugins", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8" + ], + "version": "1.0.0a3" + }, + "aiida_version": null, + "entry_points": { + "aiida.calculations": { + "porousmaterials": { + "description": [ + "This is PorousMaterialsCalculation as the subclass", + " of AiiDA CalcJob to prepare input for the PorousMaterials", + " suite of Julia codes.", + " Please refer to : https://github.com/SimonEnsemble/PorousMaterials.jl" + ], + "spec": { + "inputs": [ + { + "name": "acc_voronoi_nodes", + "required": true, + "valid_types": "SinglefileData", + "info": "Accessible Voronoi nodes calculated by Zeo++" + }, + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "parameters such as cutoff and mixing rules." 
+ }, + { + "name": "structure", + "required": true, + "valid_types": "CifData", + "info": "Framework input file as CIF" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict", + "info": "Additional input parameters" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "dictionary of calculated Voronoi energies" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "ev_output_file", + "required": false, + "valid_types": "SinglefileData", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The retrieved folder data node could not be accessed." + }, + { + "status": 101, + "message": "The retrieved folder does not contain an output file." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + } + ] + }, + "class": "aiida_porousmaterials.calculations:PorousMaterialsCalculation" + } + }, + "aiida.parsers": { + "porousmaterials": "aiida_porousmaterials.parser:PorousMaterialsParser" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + } + ], + "pip_install_cmd": "pip install --pre aiida-porousmaterials", + "is_installable": "True" + }, + "aiida-pseudo": { + "code_home": "https://github.com/aiidateam/aiida-pseudo", + "entry_point_prefix": "pseudo", + "pip_url": "aiida-pseudo", + "plugin_info": "https://raw.github.com/aiidateam/aiida-pseudo/master/setup.cfg", + "name": "aiida-pseudo", + "package_name": "aiida_pseudo", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin that simplifies working with pseudo potentials.", + "author_email": "\"Sebastiaan P. 
Huber\" ", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9" + ], + "version": "1.1.0" + }, + "aiida_version": ">=2.1,<3.0", + "entry_points": { + "aiida.data": { + "pseudo": "aiida_pseudo.data.pseudo.pseudo:PseudoPotentialData", + "pseudo.jthxml": "aiida_pseudo.data.pseudo.jthxml:JthXmlData", + "pseudo.psf": "aiida_pseudo.data.pseudo.psf:PsfData", + "pseudo.psml": "aiida_pseudo.data.pseudo.psml:PsmlData", + "pseudo.psp8": "aiida_pseudo.data.pseudo.psp8:Psp8Data", + "pseudo.upf": "aiida_pseudo.data.pseudo.upf:UpfData", + "pseudo.vps": "aiida_pseudo.data.pseudo.vps:VpsData" + }, + "aiida.groups": { + "pseudo.family": "aiida_pseudo.groups.family.pseudo:PseudoPotentialFamily", + "pseudo.family.cutoffs": "aiida_pseudo.groups.family.cutoffs:CutoffsPseudoPotentialFamily", + "pseudo.family.pseudo_dojo": "aiida_pseudo.groups.family.pseudo_dojo:PseudoDojoFamily", + "pseudo.family.sssp": "aiida_pseudo.groups.family.sssp:SsspFamily" + }, + "console_scripts": { + "aiida-pseudo": "aiida_pseudo.cli:cmd_root" + } + }, + "commits_count": 24, + "development_status": "stable", + "summaryinfo": [ + { + "colorclass": "red", + "text": "Data", + "count": 7 + }, + { + "colorclass": "purple", + "text": "Console scripts", + "count": 1 + }, + { + "colorclass": "orange", + "text": "Other (Groups)", + "count": 4 + } + ], + "pip_install_cmd": "pip install aiida-pseudo", + "is_installable": "True" + }, + "aiida-psi4": { + "code_home": "https://github.com/ltalirz/aiida-psi4/tree/master", + "development_status": "beta", + "entry_point_prefix": "psi4", + "pip_url": "git+https://github.com/ltalirz/aiida-psi4", + "name": "aiida-psi4", + "package_name": "aiida_psi4", + "hosted_on": "github.com", + "metadata": { + "author": "Leopold Talirz", + "author_email": "leopold.talirz@gmail.com", + "version": "0.1.0a0", + "description": "AiiDA plugin for the Psi4 Quantum Chemistry package.", + "classifiers": [ + "Programming Language :: Python", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Framework :: AiiDA" + ] + }, + "aiida_version": ">=1.6.4,<2.0.0", + "entry_points": { + "aiida.data": { + "psi4.atomic_input": "aiida_psi4.data:AtomicInput" + }, + "aiida.calculations": { + "psi4": { + "description": [ + "AiiDA calculation plugin wrapping the psi4 executable." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "psiapi", + "required": false, + "valid_types": "Str, SinglefileData", + "info": "Psi4 input in PsiAPI python format" + }, + { + "name": "qcschema", + "required": false, + "valid_types": "Dict, AtomicInput", + "info": "Psi4 input in QCSchema JSON format" + } + ], + "outputs": [ + { + "name": "qcschema", + "required": true, + "valid_types": "Dict", + "info": "Psi4 output in QCSchema JSON format" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." 
+ }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "stdout", + "required": true, + "valid_types": "SinglefileData", + "info": "Psi4 logfile" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 100, + "message": "Calculation did not produce all expected output files." + }, + { + "status": 101, + "message": "Psi4 reported calculation as unsuccessful." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + } + ] + }, + "class": "aiida_psi4.calculations:Psi4Calculation" + } + }, + "aiida.parsers": { + "psi4": "aiida_psi4.parsers:QCSchemaParser" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "red", + "text": "Data", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/ltalirz/aiida-psi4", + "is_installable": "True" + }, + "aiida-pyscf": { + "code_home": "https://github.com/microsoft/aiida-pyscf", + "entry_point_prefix": "pyscf", + "pip_url": "aiida-pyscf", + "plugin_info": "https://github.com/microsoft/aiida-pyscf/blob/main/pyproject.toml", + "name": "aiida-pyscf", + "package_name": "aiida_pyscf", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin for the Python-based Simulations of Chemistry Framework (PySCF).", + "author_email": "\"Sebastiaan P. Huber\" , Adam Grofe ", + "classifiers": [ + "Development Status :: 3 - Alpha", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering" + ], + "version": "0.4.0" + }, + "aiida_version": ">=2.3,<3.0", + "entry_points": { + "aiida.calculations": { + "pyscf.base": { + "description": [ + "``CalcJob`` plugin for PySCF." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "Input structure with molecular structure definition." 
+ }, + { + "name": "checkpoint", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Checkpoint of a previously completed calculation that failed to converge." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Input parameters used to render the PySCF script template." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "cubegen", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "checkpoint", + "required": false, + "valid_types": "SinglefileData", + "info": "The checkpoint file in case the calculation did not converge. Can be used as an input for a restart." + }, + { + "name": "fcidump", + "required": false, + "valid_types": "SinglefileData", + "info": "Computed fcidump files." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict", + "info": "Various computed properties parsed from the `FILENAME_RESULTS` output file." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "The optimized structure if the input parameters contained the `optimizer` key." + }, + { + "name": "trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "The geometry optimization trajectory if the input parameters contained the `optimizer` key." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." 
+ }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 302, + "message": "The stdout output file was not retrieved." + }, + { + "status": 303, + "message": "The results JSON file was not retrieved." + }, + { + "status": 410, + "message": "The electronic minimization cycle did not reach self-consistency." + }, + { + "status": 500, + "message": "The ionic minimization cycle did not converge for the given thresholds." + } + ] + }, + "class": "aiida_pyscf.calculations.base:PyscfCalculation" + } + }, + "aiida.parsers": { + "pyscf.base": "aiida_pyscf.parsers.base:PyscfParser" + }, + "aiida.workflows": { + "pyscf.base": { + "description": [ + "Workchain to run a pyscf calculation with automated error handling and restarts." + ], + "spec": { + "inputs": [ + { + "name": "pyscf", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "cubegen", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "checkpoint", + "required": false, + "valid_types": "SinglefileData", + "info": "The checkpoint file in case the calculation did not converge. Can be used as an input for a restart." + }, + { + "name": "fcidump", + "required": false, + "valid_types": "SinglefileData", + "info": "Computed fcidump files." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict", + "info": "Various computed properties parsed from the `FILENAME_RESULTS` output file." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "The optimized structure if the input parameters contained the `optimizer` key." + }, + { + "name": "trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "The geometry optimization trajectory if the input parameters contained the `optimizer` key." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." 
+ }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 300, + "message": "The calculation failed with an unrecoverable error." + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 310, + "message": "The calculation failed and did not retrieve a checkpoint file from which can be restarted." + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." + } + ] + }, + "class": "aiida_pyscf.workflows.base:PyscfBaseWorkChain" + } + } + }, + "commits_count": 63, + "development_status": "alpha", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-pyscf", + "is_installable": "True" + }, + "aiida-python": { + "entry_point_prefix": "aiidapython", + "code_home": "https://github.com/addman2/aiida-python", + "name": "aiida-python", + "package_name": "aiida_python", + "hosted_on": "github.com", + "metadata": {}, + "aiida_version": null, + "entry_points": {}, + "commits_count": 59, + "development_status": "planning", + "summaryinfo": [], + "pip_install_cmd": "See source code repository." + }, + "aiida-qeq": { + "code_home": "https://github.com/ltalirz/aiida-qeq", + "development_status": "stable", + "entry_point_prefix": "qeq", + "pip_url": "aiida-qeq", + "plugin_info": "https://raw.githubusercontent.com/ltalirz/aiida-qeq/master/setup.json", + "name": "aiida-qeq", + "package_name": "aiida_qeq", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin for computing electronic charges on atoms using equilibration-type models (QEq, EQEq, ...).", + "author": "Leopold Talirz, Daniele Ongari", + "author_email": "leopold.talirz@gmail.com", + "license": "MIT", + "home_page": "https://github.com/ltalirz/aiida-qeq", + "classifiers": [ + "Programming Language :: Python" + ], + "version": "0.1.0" + }, + "aiida_version": ">=0.12.2,<1.0.0", + "entry_points": { + "aiida.calculations": { + "qeq.eqeq": { + "description": [ + "AiiDA calculation plugin for the EQeq code." + ], + "spec": { + "inputs": [ + { + "name": "charge_data", + "required": true, + "valid_types": "SinglefileData", + "info": "File containing information on common oxidation state of the elements." + }, + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "ionization_data", + "required": true, + "valid_types": "SinglefileData", + "info": "File containing ionization data on the elements." + }, + { + "name": "parameters", + "required": true, + "valid_types": "EQeqParameters", + "info": "Command line parameters for EQEQ" + }, + { + "name": "structure", + "required": true, + "valid_types": "CifData", + "info": "Input structure, for which atomic charges are to be computed." 
+ }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + } + ] + }, + "class": "aiida_qeq.calculations.eqeq:EQeqCalculation" + }, + "qeq.qeq": { + "description": [ + "AiiDA calculation plugin for the Qeq code." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "parameters", + "required": true, + "valid_types": "SinglefileData", + "info": "File containing electronegativity and Idempotential data of the elements." + }, + { + "name": "structure", + "required": true, + "valid_types": "CifData", + "info": "Input structure, for which atomic charges are to be computed." + }, + { + "name": "configure", + "required": false, + "valid_types": "QeqParameters", + "info": "Configuration input for QEQ (configure.input file)" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." 
+ } + ] + }, + "class": "aiida_qeq.calculations.qeq:QeqCalculation" + } + }, + "aiida.data": { + "qeq.eqeq": "aiida_qeq.data.eqeq:EQeqParameters", + "qeq.qeq": "aiida_qeq.data.qeq:QeqParameters" + }, + "aiida.parsers": { + "qeq.eqeq": "aiida_qeq.parsers.eqeq:EQeqParser", + "qeq.qeq": "aiida_qeq.parsers.qeq:QeqParser" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 2 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 2 + }, + { + "colorclass": "red", + "text": "Data", + "count": 2 + } + ], + "pip_install_cmd": "pip install aiida-qeq", + "is_installable": "True" + }, + "aiida-qp2": { + "code_home": "https://github.com/TREX-CoE/aiida-qp2", + "entry_point_prefix": "qp2", + "pip_url": "aiida-qp2", + "documentation_url": "https://trex-coe.github.io/aiida-qp2/index.html", + "name": "aiida-qp2", + "package_name": "aiida_qp2", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin for the Quantum Package 2.0", + "author": "Evgeny Posenitskiy", + "author_email": "posenitskiy@irsamc.ups-tlse.fr", + "license": "MIT", + "home_page": "https://github.com/TREX-CoE/aiida-qp2", + "classifiers": [ + "Development Status :: 4 - Beta", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python" + ], + "version": "0.2.0" + }, + "aiida_version": null, + "entry_points": { + "aiida.calculations": { + "qp2": { + "description": [ + "AiiDA calculation plugin wrapping the Quantum Package code." + ], + "spec": { + "inputs": [ + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "Input parameters to generate the input file." + }, + { + "name": "basissets", + "required": false, + "valid_types": "", + "info": "A dictionary of basissets to be used in the calculations: key is the atomic symbol, value is either a single basisset." + }, + { + "name": "code", + "required": false, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "pseudos", + "required": false, + "valid_types": "", + "info": "A dictionary of pseudopotentials to be used in the calculations: key is the atomic symbol, value is a single pseudopotential." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict", + "info": "Additional input parameters." + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "Input structrure" + }, + { + "name": "wavefunction", + "required": false, + "valid_types": "SinglefileData", + "info": "The wavefunction file (EZFIO or TREXIO)." + } + ], + "outputs": [ + { + "name": "output_wavefunction", + "required": true, + "valid_types": "SinglefileData", + "info": "The wave function file (EZFIO or TREXIO)" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." 
+ }, + { + "name": "output_energy", + "required": false, + "valid_types": "Float", + "info": "The result of the calculation" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The retrieved folder data node could not be accessed." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 300, + "message": "Calculation did not produce all expected output files." + }, + { + "status": 400, + "message": "Energy value is not present in the output file." + } + ] + }, + "class": "aiida_qp2.calculations:QP2Calculation" + } + }, + "aiida.parsers": { + "qp2": "aiida_qp2.parsers:QP2Parser" + } + }, + "commits_count": 0, + "development_status": "beta", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-qp2", + "is_installable": "True" + }, + "aiida-quantumespresso": { + "code_home": "https://github.com/aiidateam/aiida-quantumespresso", + "documentation_url": "https://aiida-quantumespresso.readthedocs.io/", + "entry_point_prefix": "quantumespresso", + "pip_url": "aiida-quantumespresso", + "plugin_info": "https://raw.github.com/aiidateam/aiida-quantumespresso/master/setup.json", + "name": "aiida-quantumespresso", + "package_name": "aiida_quantumespresso", + "hosted_on": "github.com", + "metadata": { + "description": "The official AiiDA plugin for Quantum ESPRESSO", + "author_email": "The AiiDA team ", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9" + ], + "version": "4.4.0" + }, + "aiida_version": ">=2.3,<3.0", + "entry_points": { + "aiida.calculations": { + "quantumespresso.cp": { + "description": [ + "`CalcJob` implementation for the cp.x code of Quantum ESPRESSO." + ], + "spec": { + "inputs": [ + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "The input parameters that are to be used to construct the input file." + }, + { + "name": "pseudos", + "required": true, + "valid_types": "UpfData, UpfData", + "info": "A mapping of `UpfData` nodes onto the kind name to which they should apply." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "The input structure." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." 
+ }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parallelization", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Parallelization options. The following flags are allowed:\n" + }, + { + "name": "parent_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "An optional working directory of a previously completed calculation to restart from." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Optional parameters to affect the way the calculation job and the parsing are performed." + }, + { + "name": "vdw_table", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Optional van der Waals table contained in a `SinglefileData`." + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "output_trajectory", + "required": true, + "valid_types": "TrajectoryData", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 301, + "message": "The retrieved temporary folder could not be accessed." + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." 
+ }, + { + "status": 303, + "message": "The required XML file is not present in the retrieved folder." + }, + { + "status": 304, + "message": "The retrieved folder contains multiple XML files." + }, + { + "status": 310, + "message": "The stdout output file could not be read." + }, + { + "status": 311, + "message": "The stdout output file could not be parsed." + }, + { + "status": 312, + "message": "The stdout output file was incomplete probably because the calculation got interrupted." + }, + { + "status": 320, + "message": "The required XML file could not be read." + }, + { + "status": 330, + "message": "The required POS file could not be read." + }, + { + "status": 340, + "message": "The required trajectory data could not be read." + }, + { + "status": 400, + "message": "The calculation stopped prematurely because it ran out of walltime." + } + ] + }, + "class": "aiida_quantumespresso.calculations.cp:CpCalculation" + }, + "quantumespresso.create_kpoints_from_distance": { + "description": [ + "Generate a uniformly spaced kpoint mesh for a given structure.", + "", + " The spacing between kpoints in reciprocal space is guaranteed to be at least the defined distance.", + "", + " :param structure: the StructureData to which the mesh should apply", + " :param distance: a Float with the desired distance between kpoints in reciprocal space", + " :param force_parity: a Bool to specify whether the generated mesh should maintain parity", + " :returns: a KpointsData with the generated mesh" + ], + "spec": { + "inputs": [ + { + "name": "distance", + "required": true, + "valid_types": "Data", + "info": "a Float with the desired distance between kpoints in reciprocal space" + }, + { + "name": "force_parity", + "required": true, + "valid_types": "Data", + "info": "a Bool to specify whether the generated mesh should maintain parity" + }, + { + "name": "structure", + "required": true, + "valid_types": "Data", + "info": "the StructureData to which the mesh should apply" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_quantumespresso.calculations.functions.create_kpoints_from_distance:create_kpoints_from_distance" + }, + "quantumespresso.create_magnetic_configuration": { + "description": [ + "Create a new magnetic configuration from the given structure based on a list of magnetic moments per site.", + "", + " To create the new list of kinds, the algorithm loops over all the elements in the structure and makes a list of the", + " sites with that element and their corresponding magnetic moment. Next, it splits this list in three lists:", + "", + " * Zero magnetic moments: Any site that has an absolute magnetic moment lower than ``ztol``", + " * Positive magnetic moments", + " * Negative magnetic moments", + "", + " The algorithm then sorts the positive and negative lists from large to small absolute value, and loops over each of", + " list. 
New magnetic kinds will be created when the absolute difference between the magnetic moment of the current", + " kind and the site exceeds ``atol``.", + "", + " The positive and negative magnetic moments are handled separately to avoid assigning two sites with opposite signs", + " in their magnetic moment to the same kind and make sure that each kind has the correct magnetic moment, i.e. the", + " largest magnetic moment in absolute value of the sites corresponding to that kind.", + "", + " .. important:: the function currently does not support alloys.", + "", + " :param structure: a `StructureData` instance.", + " :param magnetic_moment_per_site: list of magnetic moments for each site in the structure.", + " :param atol: the absolute tolerance on determining if two sites have the same magnetic moment.", + " :param ztol: threshold for considering a kind to have non-zero magnetic moment." + ], + "spec": { + "inputs": [ + { + "name": "magnetic_moment_per_site", + "required": true, + "valid_types": "Data", + "info": "list of magnetic moments for each site in the structure." + }, + { + "name": "structure", + "required": true, + "valid_types": "Data", + "info": "a `StructureData` instance." + }, + { + "name": "atol", + "required": false, + "valid_types": "Data", + "info": "the absolute tolerance on determining if two sites have the same magnetic moment." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "ztol", + "required": false, + "valid_types": "Data", + "info": "threshold for considering a kind to have non-zero magnetic moment." + } + ], + "outputs": [], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_quantumespresso.calculations.functions.create_magnetic_configuration:create_magnetic_configuration" + }, + "quantumespresso.dos": { + "description": [ + "`CalcJob` implementation for the dos.x code of Quantum ESPRESSO." + ], + "spec": { + "inputs": [ + { + "name": "parent_folder", + "required": true, + "valid_types": "RemoteData, FolderData", + "info": "" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Parameters for the namelists in the input file." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. 
The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Use an additional node for special settings" + } + ], + "outputs": [ + { + "name": "output_dos", + "required": true, + "valid_types": "XyData", + "info": "" + }, + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 310, + "message": "An exception was raised while reading the `stdout` file: {exception}" + }, + { + "status": 311, + "message": "An exception was raised while parsing the `stdout` file: {exception}" + }, + { + "status": 312, + "message": "The stdout output file was incomplete probably because the calculation got interrupted." + }, + { + "status": 330, + "message": "The dos file could not be read from the retrieved folder." + } + ] + }, + "class": "aiida_quantumespresso.calculations.dos:DosCalculation" + }, + "quantumespresso.epw": { + "description": [ + "`CalcJob` implementation for the epw.x code of Quantum ESPRESSO." 
+ ], + "spec": { + "inputs": [ + { + "name": "kfpoints", + "required": true, + "valid_types": "KpointsData", + "info": "fine kpoint mesh" + }, + { + "name": "kpoints", + "required": true, + "valid_types": "KpointsData", + "info": "coarse kpoint mesh" + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "parent_folder_nscf", + "required": true, + "valid_types": "RemoteData", + "info": "the folder of a completed nscf `PwCalculation`" + }, + { + "name": "parent_folder_ph", + "required": true, + "valid_types": "RemoteData", + "info": "the folder of a completed `PhCalculation`" + }, + { + "name": "qfpoints", + "required": true, + "valid_types": "KpointsData", + "info": "fine qpoint mesh" + }, + { + "name": "qpoints", + "required": true, + "valid_types": "KpointsData", + "info": "coarse qpoint mesh" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." 
+ }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + } + ] + }, + "class": "aiida_quantumespresso.calculations.epw:EpwCalculation" + }, + "quantumespresso.matdyn": { + "description": [ + "`CalcJob` implementation for the matdyn.x code of Quantum ESPRESSO." + ], + "spec": { + "inputs": [ + { + "name": "force_constants", + "required": true, + "valid_types": "ForceConstantsData", + "info": "" + }, + { + "name": "kpoints", + "required": true, + "valid_types": "KpointsData", + "info": "Kpoints on which to calculate the phonon frequencies." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Parameters for the namelists in the input file." + }, + { + "name": "parent_folder", + "required": false, + "valid_types": "RemoteData, FolderData, SinglefileData, NoneType", + "info": "Use a local or remote folder as parent folder (for restarts and similar)" + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Use an additional node for special settings" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "output_phonon_bands", + "required": true, + "valid_types": "BandsData", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." 
+ }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 310, + "message": "An exception was raised while reading the `stdout` file: {exception}" + }, + { + "status": 311, + "message": "An exception was raised while parsing the `stdout` file: {exception}" + }, + { + "status": 312, + "message": "The stdout output file was incomplete probably because the calculation got interrupted." + }, + { + "status": 330, + "message": "The output frequencies file could not be read from the retrieved folder." + }, + { + "status": 410, + "message": "Number of kpoints not found in the output data" + }, + { + "status": 411, + "message": "Number of kpoints in the inputs is not commensurate with those in the output" + } + ] + }, + "class": "aiida_quantumespresso.calculations.matdyn:MatdynCalculation" + }, + "quantumespresso.merge_ph_outputs": { + "description": [ + "Calcfunction to merge outputs from multiple `ph.x` calculations with different q-points." + ], + "spec": { + "inputs": [ + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_quantumespresso.calculations.functions.merge_ph_outputs:merge_ph_outputs" + }, + "quantumespresso.namelists": { + "description": [ + "`CalcJob` implementation to serve as base class for simple post-processing tools of Quantum ESPRESSO." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Parameters for the namelists in the input file." + }, + { + "name": "parent_folder", + "required": false, + "valid_types": "RemoteData, FolderData, SinglefileData, NoneType", + "info": "Use a local or remote folder as parent folder (for restarts and similar)" + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. 
The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Use an additional node for special settings" + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 310, + "message": "An exception was raised while reading the `stdout` file: {exception}" + }, + { + "status": 311, + "message": "An exception was raised while parsing the `stdout` file: {exception}" + }, + { + "status": 312, + "message": "The stdout output file was incomplete probably because the calculation got interrupted." + } + ] + }, + "class": "aiida_quantumespresso.calculations.namelists:NamelistsCalculation" + }, + "quantumespresso.neb": { + "description": [ + "Nudged Elastic Band code (neb.x) of Quantum ESPRESSO distribution." + ], + "spec": { + "inputs": [ + { + "name": "first_structure", + "required": true, + "valid_types": "StructureData", + "info": "Initial structure" + }, + { + "name": "last_structure", + "required": true, + "valid_types": "StructureData", + "info": "Final structure" + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "NEB-specific input parameters" + }, + { + "name": "pw", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." 
+ }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parent_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "An optional working directory of a previously completed calculation to restart from." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Optional parameters to affect the way the calculation job and the parsing are performed." + } + ], + "outputs": [ + { + "name": "output_mep", + "required": true, + "valid_types": "ArrayData", + "info": "The original and interpolated energy profiles along the minimum-energy path (mep)" + }, + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "The output parameters dictionary of the NEB calculation" + }, + { + "name": "output_trajectory", + "required": true, + "valid_types": "TrajectoryData", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "iteration_array", + "required": false, + "valid_types": "ArrayData", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 303, + "message": "The required XML file is not present in the retrieved folder." + }, + { + "status": 320, + "message": "The XML output file could not be read." 
+ }, + { + "status": 321, + "message": "The XML output file could not be parsed." + }, + { + "status": 322, + "message": "The XML output file has an unsupported format." + }, + { + "status": 350, + "message": "The parser raised an unexpected exception: {exception}" + } + ] + }, + "class": "aiida_quantumespresso.calculations.neb:NebCalculation" + }, + "quantumespresso.open_grid": { + "description": [ + "``CalcJob`` implementation for the ``open_grid.x`` code of Quantum ESPRESSO." + ], + "spec": { + "inputs": [ + { + "name": "parent_folder", + "required": true, + "valid_types": "RemoteData, FolderData", + "info": "The output folder of a completed `PwCalculation` on an irreducible Brillouin zone" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Parameters for the namelists in the input file." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Use an additional node for special settings" + } + ], + "outputs": [ + { + "name": "kpoints", + "required": true, + "valid_types": "KpointsData", + "info": "The explicit list of kpoints of the unfolded kmesh" + }, + { + "name": "kpoints_mesh", + "required": true, + "valid_types": "KpointsData", + "info": "The dimensions of the unfolded kmesh" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." 
+ }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "The retrieved folder data node could not be accessed." + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 310, + "message": "An exception was raised while reading the `stdout` file: {exception}" + }, + { + "status": 311, + "message": "An exception was raised while parsing the `stdout` file: {exception}" + }, + { + "status": 312, + "message": "The stdout output file was incomplete probably because the calculation got interrupted." + }, + { + "status": 312, + "message": "Found rotation or fractional translation not compatible with FFT grid." + }, + { + "status": 350, + "message": "Mismatch between kmesh dimensions and number of kpoints." + } + ] + }, + "class": "aiida_quantumespresso.calculations.open_grid:OpenGridCalculation" + }, + "quantumespresso.ph": { + "description": [ + "`CalcJob` implementation for the ph.x code of Quantum ESPRESSO." + ], + "spec": { + "inputs": [ + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "parent_folder", + "required": true, + "valid_types": "RemoteData", + "info": "the folder of a completed `PwCalculation`" + }, + { + "name": "qpoints", + "required": true, + "valid_types": "KpointsData", + "info": "qpoint mesh" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." 
+ }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 305, + "message": "Both the stdout and XML output files could not be read or parsed." + }, + { + "status": 310, + "message": "The stdout output file could not be read." + }, + { + "status": 311, + "message": "The stdout output file could not be parsed." + }, + { + "status": 312, + "message": "The stdout output file was incomplete probably because the calculation got interrupted." + }, + { + "status": 350, + "message": "The parser raised an unexpected exception: {exception}" + }, + { + "status": 400, + "message": "The calculation stopped prematurely because it ran out of walltime." + }, + { + "status": 410, + "message": "The minimization cycle did not reach self-consistency." + }, + { + "status": 462, + "message": "The code failed during the cholesky factorization." + } + ] + }, + "class": "aiida_quantumespresso.calculations.ph:PhCalculation" + }, + "quantumespresso.pp": { + "description": [ + "`CalcJob` implementation for the pp.x code of Quantum ESPRESSO." + ], + "spec": { + "inputs": [ + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "Use a node that specifies the input parameters for the namelists" + }, + { + "name": "parent_folder", + "required": true, + "valid_types": "RemoteData, FolderData", + "info": "Output folder of a completed `PwCalculation`" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Optional parameters to affect the way the calculation job is performed." + } + ], + "outputs": [ + { + "name": "output_data", + "required": true, + "valid_types": "ArrayData", + "info": "" + }, + { + "name": "output_data_multiple", + "required": true, + "valid_types": "ArrayData", + "info": "" + }, + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 301, + "message": "The retrieved temporary folder could not be accessed." + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 303, + "message": "The parent folder did not contain the required XML output file." + }, + { + "status": 310, + "message": "The stdout output file could not be read." + }, + { + "status": 311, + "message": "The stdout output file could not be parsed." + }, + { + "status": 312, + "message": "The stdout output file was incomplete." + }, + { + "status": 330, + "message": "The formatted data output file `{filename}` was not present in the retrieved (temporary) folder." + }, + { + "status": 331, + "message": "The formatted data output file `{filename}` could not be read." + }, + { + "status": 332, + "message": "The data file format is not supported by the parser" + }, + { + "status": 333, + "message": "The formatted data output file `{filename}` could not be parsed: {exception}" + }, + { + "status": 340, + "message": "The calculation stopped prematurely because it ran out of walltime but the job was killed by the scheduler before the files were safely written to disk for a potential restart." 
+ }, + { + "status": 350, + "message": "The parser raised an unexpected exception: {exception}" + } + ] + }, + "class": "aiida_quantumespresso.calculations.pp:PpCalculation" + }, + "quantumespresso.projwfc": { + "description": [ + "`CalcJob` implementation for the projwfc.x code of Quantum ESPRESSO.", + "", + " Projwfc.x code of the Quantum ESPRESSO distribution, handles the the computation of projections of bloch", + " wavefunctions onto atomic orbitals.", + "", + " . For more information, refer to http://www.quantum-espresso.org/" + ], + "spec": { + "inputs": [ + { + "name": "parent_folder", + "required": true, + "valid_types": "RemoteData, FolderData", + "info": "The output folder of a pw.x calculation" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Parameters for the namelists in the input file." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Use an additional node for special settings" + } + ], + "outputs": [ + { + "name": "Dos", + "required": true, + "valid_types": "XyData", + "info": "" + }, + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." 
+ }, + { + "name": "bands", + "required": false, + "valid_types": "BandsData", + "info": "" + }, + { + "name": "bands_down", + "required": false, + "valid_types": "BandsData", + "info": "" + }, + { + "name": "bands_up", + "required": false, + "valid_types": "BandsData", + "info": "" + }, + { + "name": "projections", + "required": false, + "valid_types": "ProjectionData", + "info": "" + }, + { + "name": "projections_down", + "required": false, + "valid_types": "ProjectionData", + "info": "" + }, + { + "name": "projections_up", + "required": false, + "valid_types": "ProjectionData", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 301, + "message": "The retrieved temporary folder could not be accessed." + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 303, + "message": "The retrieved folder did not contain the required XML file." + }, + { + "status": 310, + "message": "An exception was raised while reading the `stdout` file: {exception}" + }, + { + "status": 311, + "message": "An exception was raised while parsing the `stdout` file: {exception}" + }, + { + "status": 312, + "message": "The stdout output file was incomplete probably because the calculation got interrupted." + }, + { + "status": 320, + "message": "The XML output file could not be read." + }, + { + "status": 321, + "message": "The XML output file could not be parsed." + }, + { + "status": 322, + "message": "The XML output file has an unsupported format." + }, + { + "status": 330, + "message": "The pdos_tot file could not be read from the retrieved folder." + }, + { + "status": 340, + "message": "An exception was raised parsing bands and projections." + } + ] + }, + "class": "aiida_quantumespresso.calculations.projwfc:ProjwfcCalculation" + }, + "quantumespresso.pw": { + "description": [ + "`CalcJob` implementation for the pw.x code of Quantum ESPRESSO." + ], + "spec": { + "inputs": [ + { + "name": "kpoints", + "required": true, + "valid_types": "KpointsData", + "info": "kpoint mesh or kpoint path" + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "The input parameters that are to be used to construct the input file." + }, + { + "name": "pseudos", + "required": true, + "valid_types": "UpfData, UpfData", + "info": "A mapping of `UpfData` nodes onto the kind name to which they should apply." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "The input structure." 
+ }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "hubbard_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "SinglefileData node containing the output Hubbard parameters from a HpCalculation" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parallelization", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Parallelization options. The following flags are allowed:\nnpool : The number of 'pools', each taking care of a group of k-points.\nnband : The number of 'band groups', each taking care of a group of Kohn-Sham orbitals.\nntg : The number of 'task groups' across which the FFT planes are distributed.\nndiag : The number of 'linear algebra groups' used when parallelizing the subspace diagonalization / iterative orthonormalization. By default, no parameter is passed to Quantum ESPRESSO, meaning it will use its default." + }, + { + "name": "parent_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "An optional working directory of a previously completed calculation to restart from." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Optional parameters to affect the way the calculation job and the parsing are performed." + }, + { + "name": "vdw_table", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Optional van der Waals table contained in a `SinglefileData`." + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "The `output_parameters` output node of the successful calculation." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "output_atomic_occupations", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "output_band", + "required": false, + "valid_types": "BandsData", + "info": "The `output_band` output node of the successful calculation if present." 
+ }, + { + "name": "output_kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "" + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "The `output_structure` output node of the successful calculation if present." + }, + { + "name": "output_trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 301, + "message": "The retrieved temporary folder could not be accessed." + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 303, + "message": "The retrieved folder did not contain the required XML file." + }, + { + "status": 304, + "message": "The retrieved folder contained multiple XML files." + }, + { + "status": 305, + "message": "Both the stdout and XML output files could not be read or parsed." + }, + { + "status": 310, + "message": "The stdout output file could not be read." + }, + { + "status": 311, + "message": "The stdout output file could not be parsed." + }, + { + "status": 312, + "message": "The stdout output file was incomplete probably because the calculation got interrupted." + }, + { + "status": 320, + "message": "The XML output file could not be read." + }, + { + "status": 321, + "message": "The XML output file could not be parsed." + }, + { + "status": 322, + "message": "The XML output file has an unsupported format." + }, + { + "status": 340, + "message": "The calculation stopped prematurely because it ran out of walltime but the job was killed by the scheduler before the files were safely written to disk for a potential restart." + }, + { + "status": 350, + "message": "The parser raised an unexpected exception: {exception}" + }, + { + "status": 360, + "message": "The code failed in finding a valid reciprocal lattice vector." + }, + { + "status": 400, + "message": "The calculation stopped prematurely because it ran out of walltime." + }, + { + "status": 410, + "message": "The electronic minimization cycle did not reach self-consistency." + }, + { + "status": 461, + "message": "The code failed with negative dexx in the exchange calculation." + }, + { + "status": 462, + "message": "The code failed during the cholesky factorization." + }, + { + "status": 463, + "message": "Too many bands failed to converge during the diagonalization." + }, + { + "status": 464, + "message": "The S matrix was found to be not positive definite." 
+ }, + { + "status": 465, + "message": "The `zhegvd` failed in the PPCG diagonalization." + }, + { + "status": 466, + "message": "The `[Q, R] = qr(X, 0)` failed in the PPCG diagonalization." + }, + { + "status": 467, + "message": "The eigenvector failed to converge." + }, + { + "status": 468, + "message": "The factorization in the Broyden routine failed." + }, + { + "status": 481, + "message": "The k-point parallelization \"npools\" is too high, some nodes have no k-points." + }, + { + "status": 500, + "message": "The ionic minimization cycle did not converge for the given thresholds." + }, + { + "status": 501, + "message": "Then ionic minimization cycle converged but the thresholds are exceeded in the final SCF." + }, + { + "status": 502, + "message": "The ionic minimization cycle did not converge after the maximum number of steps." + }, + { + "status": 503, + "message": "The ionic minimization cycle did not finish because the calculation was interrupted but a partial trajectory and output structure was successfully parsed which can be used for a restart." + }, + { + "status": 510, + "message": "The electronic minimization cycle failed during an ionic minimization cycle." + }, + { + "status": 511, + "message": "The ionic minimization cycle converged, but electronic convergence was not reached in the final SCF." + }, + { + "status": 520, + "message": "The ionic minimization cycle terminated prematurely because of two consecutive failures in the BFGS algorithm." + }, + { + "status": 521, + "message": "The ionic minimization cycle terminated prematurely because of two consecutive failures in the BFGS algorithm and electronic convergence failed in the final SCF." + }, + { + "status": 531, + "message": "The electronic minimization cycle did not reach self-consistency." + }, + { + "status": 541, + "message": "The variable cell optimization broke the symmetry of the k-points." + }, + { + "status": 542, + "message": "The cell relaxation caused a significant volume contraction and there is not enough space allocated for radial FFT." + }, + { + "status": 710, + "message": "The electronic minimization cycle did not reach self-consistency, but `scf_must_converge` is `False` and/or `electron_maxstep` is 0." + } + ] + }, + "class": "aiida_quantumespresso.calculations.pw:PwCalculation" + }, + "quantumespresso.pw2gw": { + "description": [ + "`CalcJob` implementation for the pw2gw.x code of Quantum ESPRESSO." + ], + "spec": { + "inputs": [ + { + "name": "parent_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Output folder of a completed `PwCalculation`" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Parameters for the namelists in the input file." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. 
The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Use an additional node for special settings" + } + ], + "outputs": [ + { + "name": "eps", + "required": true, + "valid_types": "ArrayData", + "info": "The `eps` output node containing 5 arrays `energy`, `epsX`, `epsY`, `epsZ`, `epsTOT`" + }, + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "The `output_parameters` output node of the successful calculation.`" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 305, + "message": "The eps*.dat output files could not be read or parsed." + }, + { + "status": 310, + "message": "An exception was raised while reading the `stdout` file: {exception}" + }, + { + "status": 311, + "message": "An exception was raised while parsing the `stdout` file: {exception}" + }, + { + "status": 312, + "message": "The stdout output file was incomplete probably because the calculation got interrupted." + }, + { + "status": 330, + "message": "The eps*.dat output files do not have the expected shape (N, 2)." + }, + { + "status": 331, + "message": "The eps*.dat output files contains different values of energies." 
+ }, + { + "status": 350, + "message": "The parser raised an unexpected exception: {exception}" + } + ] + }, + "class": "aiida_quantumespresso.calculations.pw2gw:Pw2gwCalculation" + }, + "quantumespresso.pw2wannier90": { + "description": [ + "`CalcJob` implementation for the pw2wannier.x code of Quantum ESPRESSO.", + "", + " For more information, refer to http://www.quantum-espresso.org/ and http://www.wannier.org/" + ], + "spec": { + "inputs": [ + { + "name": "nnkp_file", + "required": true, + "valid_types": "SinglefileData", + "info": "A SinglefileData containing the .nnkp file generated by wannier90.x -pp" + }, + { + "name": "parent_folder", + "required": true, + "valid_types": "RemoteData, FolderData", + "info": "The output folder of a pw.x calculation" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Parameters for the namelists in the input file." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Use an additional node for special settings" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." 
+ }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 310, + "message": "An exception was raised while reading the `stdout` file: {exception}" + }, + { + "status": 311, + "message": "An exception was raised while parsing the `stdout` file: {exception}" + }, + { + "status": 312, + "message": "The stdout output file was incomplete probably because the calculation got interrupted." + }, + { + "status": 340, + "message": "Encountered a generic error message" + }, + { + "status": 350, + "message": "The parser raised an unexpected exception: {exception}" + } + ] + }, + "class": "aiida_quantumespresso.calculations.pw2wannier90:Pw2wannier90Calculation" + }, + "quantumespresso.pwimmigrant": { + "description": [ + "Create a PwCalculation object that can be used to import old jobs.", + "", + " This is a sublass of aiida_quantumespresso.calculations.PwCalculation", + " with slight modifications to some of the class variables and additional", + " methods that", + "", + " a. parse the job's input file to create the calculation's input", + " nodes that would exist if the calculation were submitted using AiiDa,", + " b. bypass the functions of the daemon, and prepare the node's attributes", + " such that all the processes (copying of the files to the repository,", + " results parsing, ect.) can be performed", + "", + " .. note:: The keyword arguments of PwCalculation are also available.", + "", + " :param remote_workdir: Absolute path to the directory where the job was run.", + " The transport of the computer you link ask input to the calculation is", + " the transport that will be used to retrieve the calculation's files.", + " Therefore, ``remote_workdir`` should be the absolute path to the job's", + " directory on that computer.", + " :type remote_workdir: str", + "", + " :param input_file_name: The file name of the job's input file.", + " :type input_file_name: str", + "", + " :param output_file_name: The file name of the job's output file (i.e. the", + " file containing the stdout of QE).", + " :type output_file_name: str" + ], + "spec": { + "inputs": [ + { + "name": "kpoints", + "required": true, + "valid_types": "KpointsData", + "info": "kpoint mesh or kpoint path" + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "The input parameters that are to be used to construct the input file." + }, + { + "name": "pseudos", + "required": true, + "valid_types": "UpfData, UpfData", + "info": "A mapping of `UpfData` nodes onto the kind name to which they should apply." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "The input structure." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." 
+ }, + { + "name": "hubbard_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "SinglefileData node containing the output Hubbard parameters from a HpCalculation" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parallelization", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Parallelization options. The following flags are allowed:\nnpool : The number of 'pools', each taking care of a group of k-points.\nnband : The number of 'band groups', each taking care of a group of Kohn-Sham orbitals.\nntg : The number of 'task groups' across which the FFT planes are distributed.\nndiag : The number of 'linear algebra groups' used when parallelizing the subspace diagonalization / iterative orthonormalization. By default, no parameter is passed to Quantum ESPRESSO, meaning it will use its default." + }, + { + "name": "parent_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "An optional working directory of a previously completed calculation to restart from." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Optional parameters to affect the way the calculation job and the parsing are performed." + }, + { + "name": "vdw_table", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "Optional van der Waals table contained in a `SinglefileData`." + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "The `output_parameters` output node of the successful calculation." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "output_atomic_occupations", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "output_band", + "required": false, + "valid_types": "BandsData", + "info": "The `output_band` output node of the successful calculation if present." + }, + { + "name": "output_kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "" + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "The `output_structure` output node of the successful calculation if present." 
+ }, + { + "name": "output_trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 301, + "message": "The retrieved temporary folder could not be accessed." + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 303, + "message": "The retrieved folder did not contain the required XML file." + }, + { + "status": 304, + "message": "The retrieved folder contained multiple XML files." + }, + { + "status": 305, + "message": "Both the stdout and XML output files could not be read or parsed." + }, + { + "status": 310, + "message": "The stdout output file could not be read." + }, + { + "status": 311, + "message": "The stdout output file could not be parsed." + }, + { + "status": 312, + "message": "The stdout output file was incomplete probably because the calculation got interrupted." + }, + { + "status": 320, + "message": "The XML output file could not be read." + }, + { + "status": 321, + "message": "The XML output file could not be parsed." + }, + { + "status": 322, + "message": "The XML output file has an unsupported format." + }, + { + "status": 340, + "message": "The calculation stopped prematurely because it ran out of walltime but the job was killed by the scheduler before the files were safely written to disk for a potential restart." + }, + { + "status": 350, + "message": "The parser raised an unexpected exception: {exception}" + }, + { + "status": 360, + "message": "The code failed in finding a valid reciprocal lattice vector." + }, + { + "status": 400, + "message": "The calculation stopped prematurely because it ran out of walltime." + }, + { + "status": 410, + "message": "The electronic minimization cycle did not reach self-consistency." + }, + { + "status": 461, + "message": "The code failed with negative dexx in the exchange calculation." + }, + { + "status": 462, + "message": "The code failed during the cholesky factorization." + }, + { + "status": 463, + "message": "Too many bands failed to converge during the diagonalization." + }, + { + "status": 464, + "message": "The S matrix was found to be not positive definite." + }, + { + "status": 465, + "message": "The `zhegvd` failed in the PPCG diagonalization." + }, + { + "status": 466, + "message": "The `[Q, R] = qr(X, 0)` failed in the PPCG diagonalization." + }, + { + "status": 467, + "message": "The eigenvector failed to converge." + }, + { + "status": 468, + "message": "The factorization in the Broyden routine failed." 
+ }, + { + "status": 481, + "message": "The k-point parallelization \"npools\" is too high, some nodes have no k-points." + }, + { + "status": 500, + "message": "The ionic minimization cycle did not converge for the given thresholds." + }, + { + "status": 501, + "message": "Then ionic minimization cycle converged but the thresholds are exceeded in the final SCF." + }, + { + "status": 502, + "message": "The ionic minimization cycle did not converge after the maximum number of steps." + }, + { + "status": 503, + "message": "The ionic minimization cycle did not finish because the calculation was interrupted but a partial trajectory and output structure was successfully parsed which can be used for a restart." + }, + { + "status": 510, + "message": "The electronic minimization cycle failed during an ionic minimization cycle." + }, + { + "status": 511, + "message": "The ionic minimization cycle converged, but electronic convergence was not reached in the final SCF." + }, + { + "status": 520, + "message": "The ionic minimization cycle terminated prematurely because of two consecutive failures in the BFGS algorithm." + }, + { + "status": 521, + "message": "The ionic minimization cycle terminated prematurely because of two consecutive failures in the BFGS algorithm and electronic convergence failed in the final SCF." + }, + { + "status": 531, + "message": "The electronic minimization cycle did not reach self-consistency." + }, + { + "status": 541, + "message": "The variable cell optimization broke the symmetry of the k-points." + }, + { + "status": 542, + "message": "The cell relaxation caused a significant volume contraction and there is not enough space allocated for radial FFT." + }, + { + "status": 710, + "message": "The electronic minimization cycle did not reach self-consistency, but `scf_must_converge` is `False` and/or `electron_maxstep` is 0." + } + ] + }, + "class": "aiida_quantumespresso.calculations.pwimmigrant:PwimmigrantCalculation" + }, + "quantumespresso.q2r": { + "description": [ + "`CalcJob` implementation for the q2r.x code of Quantum ESPRESSO." + ], + "spec": { + "inputs": [ + { + "name": "parent_folder", + "required": true, + "valid_types": "RemoteData, FolderData", + "info": "" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Parameters for the namelists in the input file." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. 
If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Use an additional node for special settings" + } + ], + "outputs": [ + { + "name": "force_constants", + "required": true, + "valid_types": "ForceConstantsData", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 310, + "message": "An exception was raised while reading the `stdout` file: {exception}" + }, + { + "status": 311, + "message": "An exception was raised while parsing the `stdout` file: {exception}" + }, + { + "status": 312, + "message": "The stdout output file was incomplete probably because the calculation got interrupted." + }, + { + "status": 330, + "message": "The force constants file could not be read." + } + ] + }, + "class": "aiida_quantumespresso.calculations.q2r:Q2rCalculation" + }, + "quantumespresso.seekpath_structure_analysis": { + "description": [ + "Primitivize the structure with SeeKpath and generate the high symmetry k-point path through its Brillouin zone.", + "", + " This calcfunction will take a structure and pass it through SeeKpath to get the normalized primitive cell and the", + " path of high symmetry k-points through its Brillouin zone. Note that the returned primitive cell may differ from the", + " original structure in which case the k-points are only congruent with the primitive cell.", + "", + " The keyword arguments can be used to specify various Seekpath parameters, such as:", + "", + " with_time_reversal: True", + " reference_distance: 0.025", + " recipe: 'hpkot'", + " threshold: 1e-07", + " symprec: 1e-05", + " angle_tolerance: -1.0", + "", + " Note that exact parameters that are available and their defaults will depend on your Seekpath version." 
+ ], + "spec": { + "inputs": [ + { + "name": "structure", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_quantumespresso.calculations.functions.seekpath_structure_analysis:seekpath_structure_analysis" + }, + "quantumespresso.xspectra": { + "description": [ + "CalcJob implementation for the xspectra.x code of Quantum ESPRESSO." + ], + "spec": { + "inputs": [ + { + "name": "core_wfc_data", + "required": true, + "valid_types": "SinglefileData", + "info": "Core wavefunction data, generated by the upf2plotcore.sh utility" + }, + { + "name": "kpoints", + "required": true, + "valid_types": "KpointsData", + "info": "The K-point sampling to be used for the XSpectra calculation" + }, + { + "name": "parent_folder", + "required": true, + "valid_types": "RemoteData", + "info": "" + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "gamma_file", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "An optional file containing the data for the broadening function used when `gamma_mode=file`" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Parameters for the namelists in the input file." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Use an additional node for special settings" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." 
+ }, + { + "name": "spectra", + "required": true, + "valid_types": "XyData", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 302, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 310, + "message": "An exception was raised while reading the `stdout` file: {exception}" + }, + { + "status": 311, + "message": "An exception was raised while parsing the `stdout` file: {exception}" + }, + { + "status": 312, + "message": "The stdout output file was incomplete probably because the calculation got interrupted." + }, + { + "status": 313, + "message": "xiabs was set incorrectly, check and ensure that the index value correctly refers to the atomic species containing the core-hole (where the index starts from 1)." + }, + { + "status": 314, + "message": "xiabs was either set to 0 or less, or was greater than ntyp." + }, + { + "status": 330, + "message": "The xspectra output file could not be read from the retrieved folder." + }, + { + "status": 331, + "message": "The spectrum data file could not be read using NumPy genfromtxt" + }, + { + "status": 400, + "message": "The time limit set for the calculation was exceeded, and the job wrote a save file before exiting." 
+ } + ] + }, + "class": "aiida_quantumespresso.calculations.xspectra:XspectraCalculation" + } + }, + "aiida.data": { + "quantumespresso.force_constants": "aiida_quantumespresso.data.force_constants:ForceConstantsData", + "quantumespresso.hubbard_structure": "aiida_quantumespresso.data.hubbard_structure:HubbardStructureData" + }, + "aiida.parsers": { + "quantumespresso.cp": "aiida_quantumespresso.parsers.cp:CpParser", + "quantumespresso.dos": "aiida_quantumespresso.parsers.dos:DosParser", + "quantumespresso.matdyn": "aiida_quantumespresso.parsers.matdyn:MatdynParser", + "quantumespresso.neb": "aiida_quantumespresso.parsers.neb:NebParser", + "quantumespresso.open_grid": "aiida_quantumespresso.parsers.open_grid:OpenGridParser", + "quantumespresso.ph": "aiida_quantumespresso.parsers.ph:PhParser", + "quantumespresso.pp": "aiida_quantumespresso.parsers.pp:PpParser", + "quantumespresso.projwfc": "aiida_quantumespresso.parsers.projwfc:ProjwfcParser", + "quantumespresso.pw": "aiida_quantumespresso.parsers.pw:PwParser", + "quantumespresso.pw2gw": "aiida_quantumespresso.parsers.pw2gw:Pw2gwParser", + "quantumespresso.pw2wannier90": "aiida_quantumespresso.parsers.pw2wannier90:Pw2wannier90Parser", + "quantumespresso.q2r": "aiida_quantumespresso.parsers.q2r:Q2rParser", + "quantumespresso.xspectra": "aiida_quantumespresso.parsers.xspectra:XspectraParser" + }, + "aiida.tools.calculations": { + "quantumespresso.pw": "aiida_quantumespresso.tools.calculations.pw:PwCalculationTools" + }, + "aiida.tools.data.orbitals": { + "noncollinearhydrogen": "aiida_quantumespresso.tools.data.orbital.noncollinearhydrogen:NoncollinearHydrogenOrbital", + "spinorbithydrogen": "aiida_quantumespresso.tools.data.orbital.spinorbithydrogen:SpinorbitHydrogenOrbital" + }, + "aiida.workflows": { + "quantumespresso.matdyn.base": { + "description": [ + "Workchain to run a Quantum ESPRESSO matdyn.x calculation with automated error handling and restarts." + ], + "spec": { + "inputs": [ + { + "name": "matdyn", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "output_phonon_bands", + "required": true, + "valid_types": "BandsData", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 300, + "message": "The calculation failed with an unrecoverable error." + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." + } + ] + }, + "class": "aiida_quantumespresso.workflows.matdyn.base:MatdynBaseWorkChain" + }, + "quantumespresso.pdos": { + "description": [ + "A WorkChain to compute Total & Partial Density of States of a structure, using Quantum Espresso." + ], + "spec": { + "inputs": [ + { + "name": "dos", + "required": true, + "valid_types": "Data", + "info": "Input parameters for the `dos.x` calculation. Note that the `Emin`, `Emax` and `DeltaE` values have to match with those in the `projwfc` inputs." + }, + { + "name": "nscf", + "required": true, + "valid_types": "Data", + "info": "Inputs for the `PwBaseWorkChain` of the `nscf` calculation." + }, + { + "name": "projwfc", + "required": true, + "valid_types": "Data", + "info": "Input parameters for the `projwfc.x` calculation. Note that the `Emin`, `Emax` and `DeltaE` values have to match with those in the `dos` inputs." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "The input structure." + }, + { + "name": "align_to_fermi", + "required": false, + "valid_types": "Bool", + "info": "If true, Emin=>Emin-Efermi & Emax=>Emax-Efermi, where Efermi is taken from the `nscf` calculation. Note that it only makes sense to align `Emax` and `Emin` to the fermi level in case they are actually provided by in the `dos` and `projwfc` inputs, since otherwise the " + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If ``True``, work directories of all called calculation will be cleaned at the end of execution." + }, + { + "name": "dry_run", + "required": false, + "valid_types": "Bool, NoneType", + "info": "Terminate workchain steps before submitting calculations (test purposes only)." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "scf", + "required": false, + "valid_types": "Data", + "info": "Inputs for the `PwBaseWorkChain` of the `scf` calculation." + }, + { + "name": "serial_clean", + "required": false, + "valid_types": "Bool, NoneType", + "info": "If ``True``, calculations will be run in serial, and work directories will be cleaned before the next step." 
+ } + ], + "outputs": [ + { + "name": "dos", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "nscf", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "projwfc", + "required": true, + "valid_types": "", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 202, + "message": "Neither the `kpoints` nor the `kpoints_distance` input was specified for base or nscf namespaces." + }, + { + "status": 401, + "message": "the SCF sub process failed" + }, + { + "status": 402, + "message": "the NSCF sub process failed" + }, + { + "status": 403, + "message": "the DOS sub process failed" + }, + { + "status": 404, + "message": "the PROJWFC sub process failed" + }, + { + "status": 404, + "message": "both the DOS and PROJWFC sub process failed" + } + ] + }, + "class": "aiida_quantumespresso.workflows.pdos:PdosWorkChain" + }, + "quantumespresso.ph.base": { + "description": [ + "Workchain to run a Quantum ESPRESSO ph.x calculation with automated error handling and restarts." + ], + "spec": { + "inputs": [ + { + "name": "ph", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "only_initialization", + "required": false, + "valid_types": "Bool", + "info": "" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." 
+ }, + { + "status": 204, + "message": "The `metadata.options` did not specify both `resources.num_machines` and `max_wallclock_seconds`. This exit status has been deprecated as the check it corresponded to was incorrect." + }, + { + "status": 300, + "message": "The calculation failed with an unrecoverable error." + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 401, + "message": "The work chain failed to merge the q-points data from multiple `PhCalculation`s because not all q-points were parsed." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." + } + ] + }, + "class": "aiida_quantumespresso.workflows.ph.base:PhBaseWorkChain" + }, + "quantumespresso.pw.bands": { + "description": [ + "Workchain to compute a band structure for a given structure using Quantum ESPRESSO pw.x.", + "", + " The logic for the computation of various parameters for the BANDS step is as follows:", + "", + " Number of bands:", + " One can specify the number of bands to be used in the BANDS step either directly through the input parameters", + " `bands.pw.parameters.SYSTEM.nbnd` or through `nbands_factor`. Note that specifying both is not allowed. When", + " neither is specified nothing will be set by the work chain and the default of Quantum ESPRESSO will end up being", + " used. If the `nbands_factor` is specified the maximum value of the following values will be used:", + "", + " * `nbnd` of the preceding SCF calculation", + " * 0.5 * nelectrons * nbands_factor", + " * 0.5 * nelectrons + 4", + "", + " Kpoints:", + " There are three options; specify either an existing `KpointsData` through `bands_kpoints`, or specify the", + " `bands_kpoint_distance`, or specify neither. For the former those exact kpoints will be used for the BANDS step.", + " In the two other cases, the structure will first be normalized using SeekPath and the path along high-symmetry", + " k-points will be generated on that structure. The distance between kpoints for the path will be equal to that", + " of `bands_kpoints_distance` or the SeekPath default if not specified." + ], + "spec": { + "inputs": [ + { + "name": "bands", + "required": true, + "valid_types": "Data", + "info": "Inputs for the `PwBaseWorkChain` for the BANDS calculation." + }, + { + "name": "scf", + "required": true, + "valid_types": "Data", + "info": "Inputs for the `PwBaseWorkChain` for the SCF calculation." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "The inputs structure." + }, + { + "name": "bands_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "Explicit kpoints to use for the BANDS calculation. Specify either this or `bands_kpoints_distance`." + }, + { + "name": "bands_kpoints_distance", + "required": false, + "valid_types": "Float, NoneType", + "info": "Minimum kpoints distance for the BANDS calculation. Specify either this or `bands_kpoints`." + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation will be cleaned at the end of execution." 
+ }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "nbands_factor", + "required": false, + "valid_types": "Float, NoneType", + "info": "The number of bands for the BANDS calculation is that used for the SCF multiplied by this factor." + }, + { + "name": "relax", + "required": false, + "valid_types": "Data", + "info": "Inputs for the `PwRelaxWorkChain`, if not specified at all, the relaxation step is skipped." + } + ], + "outputs": [ + { + "name": "band_parameters", + "required": true, + "valid_types": "Dict", + "info": "The output parameters of the BANDS `PwBaseWorkChain`." + }, + { + "name": "band_structure", + "required": true, + "valid_types": "BandsData", + "info": "The computed band structure." + }, + { + "name": "scf_parameters", + "required": true, + "valid_types": "Dict", + "info": "The output parameters of the SCF `PwBaseWorkChain`." + }, + { + "name": "primitive_structure", + "required": false, + "valid_types": "StructureData", + "info": "The normalized and primitivized structure for which the bands are computed." + }, + { + "name": "seekpath_parameters", + "required": false, + "valid_types": "Dict", + "info": "The parameters used in the SeeKpath call to normalize the input or relaxed structure." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 201, + "message": "Cannot specify both `nbands_factor` and `bands.pw.parameters.SYSTEM.nbnd`." + }, + { + "status": 202, + "message": "Cannot specify both `bands_kpoints` and `bands_kpoints_distance`." + }, + { + "status": 401, + "message": "The PwRelaxWorkChain sub process failed" + }, + { + "status": 402, + "message": "The scf PwBasexWorkChain sub process failed" + }, + { + "status": 403, + "message": "The bands PwBasexWorkChain sub process failed" + } + ] + }, + "class": "aiida_quantumespresso.workflows.pw.bands:PwBandsWorkChain" + }, + "quantumespresso.pw.base": { + "description": [ + "Workchain to run a Quantum ESPRESSO pw.x calculation with automated error handling and restarts." + ], + "spec": { + "inputs": [ + { + "name": "pw", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "An explicit k-points list or mesh. Either this or `kpoints_distance` has to be provided." + }, + { + "name": "kpoints_distance", + "required": false, + "valid_types": "Float, NoneType", + "info": "The minimum desired distance in 1/\u212b between k-points in reciprocal space. The explicit k-points will be generated automatically by a calculation function based on the input structure." 
+ }, + { + "name": "kpoints_force_parity", + "required": false, + "valid_types": "Bool, NoneType", + "info": "Optional input when constructing the k-points based on a desired `kpoints_distance`. Setting this to `True` will force the k-point mesh to have an even number of points along each lattice vector except for any non-periodic directions." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "The `output_parameters` output node of the successful calculation." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "output_atomic_occupations", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "output_band", + "required": false, + "valid_types": "BandsData", + "info": "The `output_band` output node of the successful calculation if present." + }, + { + "name": "output_kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "" + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "The `output_structure` output node of the successful calculation if present." + }, + { + "name": "output_trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 201, + "message": "The explicit `pseudos` or `pseudo_family` could not be used to get the necessary pseudos." + }, + { + "status": 202, + "message": "Neither the `kpoints` nor the `kpoints_distance` input was specified." + }, + { + "status": 203, + "message": "Neither the `options` nor `automatic_parallelization` input was specified. This exit status has been deprecated as the check it corresponded to was incorrect." + }, + { + "status": 204, + "message": "The `metadata.options` did not specify both `resources.num_machines` and `max_wallclock_seconds`. This exit status has been deprecated as the check it corresponded to was incorrect." + }, + { + "status": 210, + "message": "Required key for `automatic_parallelization` was not specified.This exit status has been deprecated as the automatic parallellization feature was removed." 
+ }, + { + "status": 211, + "message": "Unrecognized keys were specified for `automatic_parallelization`.This exit status has been deprecated as the automatic parallellization feature was removed." + }, + { + "status": 300, + "message": "The calculation failed with an unidentified unrecoverable error." + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 310, + "message": "The calculation failed with a known unrecoverable error." + }, + { + "status": 320, + "message": "The initialization calculation failed." + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." + }, + { + "status": 501, + "message": "Then ionic minimization cycle converged but the thresholds are exceeded in the final SCF." + }, + { + "status": 710, + "message": "The electronic minimization cycle did not reach self-consistency, but `scf_must_converge` is `False` and/or `electron_maxstep` is 0." + } + ] + }, + "class": "aiida_quantumespresso.workflows.pw.base:PwBaseWorkChain" + }, + "quantumespresso.pw.relax": { + "description": [ + "Workchain to relax a structure using Quantum ESPRESSO pw.x." + ], + "spec": { + "inputs": [ + { + "name": "base", + "required": true, + "valid_types": "Data", + "info": "Inputs for the `PwBaseWorkChain` for the main relax loop." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "The inputs structure." + }, + { + "name": "base_final_scf", + "required": false, + "valid_types": "Data", + "info": "Inputs for the `PwBaseWorkChain` for the final scf." + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation will be cleaned at the end of execution." + }, + { + "name": "max_meta_convergence_iterations", + "required": false, + "valid_types": "Int", + "info": "The maximum number of variable cell relax iterations in the meta convergence cycle." + }, + { + "name": "meta_convergence", + "required": false, + "valid_types": "Bool", + "info": "If `True` the workchain will perform a meta-convergence on the cell volume." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "volume_convergence", + "required": false, + "valid_types": "Float", + "info": "The volume difference threshold between two consecutive meta convergence iterations." + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "The `output_parameters` output node of the successful calculation." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "output_atomic_occupations", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "output_band", + "required": false, + "valid_types": "BandsData", + "info": "The `output_band` output node of the successful calculation if present." 
+ }, + { + "name": "output_kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "" + }, + { + "name": "output_structure", + "required": false, + "valid_types": "StructureData", + "info": "The successfully relaxed structure." + }, + { + "name": "output_trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 401, + "message": "the relax PwBaseWorkChain sub process failed" + }, + { + "status": 402, + "message": "the final scf PwBaseWorkChain sub process failed" + } + ] + }, + "class": "aiida_quantumespresso.workflows.pw.relax:PwRelaxWorkChain" + }, + "quantumespresso.q2r.base": { + "description": [ + "Workchain to run a Quantum ESPRESSO q2r.x calculation with automated error handling and restarts." + ], + "spec": { + "inputs": [ + { + "name": "q2r", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "force_constants", + "required": true, + "valid_types": "ForceConstantsData", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 300, + "message": "The calculation failed with an unrecoverable error." 
+ }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." + } + ] + }, + "class": "aiida_quantumespresso.workflows.q2r.base:Q2rBaseWorkChain" + }, + "quantumespresso.xps": { + "description": [ + "Workchain to compute X-ray photoelectron spectra (XPS) for a given structure.", + "", + " The WorkChain itself firstly calls the PwRelaxWorkChain to relax the input structure if", + " required. Then determines the input settings for each XPS calculation automatically using", + " ``get_xspectra_structures()``. The input structures are generated from the standardized", + " structure by converting each to a supercell with cell dimensions of at least 8.0 angstrom", + " in each periodic dimension in order to sufficiently reduce the unphysical interaction", + " of the core-hole with neighbouring images. The size of the minimum size requirement can be", + " overriden by the user if required. Then the standard Delta-Self-Consistent-Field (\u0394SCF)", + " method is used to get the XPS binding energy. Finally, the XPS spectrum is calculated", + " using the Voigt profile." + ], + "spec": { + "inputs": [ + { + "name": "ch_scf", + "required": true, + "valid_types": "Data", + "info": "Input parameters for the basic xps workflow (core-hole SCF)." + }, + { + "name": "core_hole_pseudos", + "required": true, + "valid_types": "UpfData, UpfData", + "info": "Dynamic namespace for pairs of excited-state pseudopotentials for each absorbing element. Must use the mapping \"{element}\" : {Upf}\"." + }, + { + "name": "gipaw_pseudos", + "required": true, + "valid_types": "UpfData, UpfData", + "info": "Dynamic namespace for pairs of ground-state pseudopotentials for each absorbing element. Must use the mapping \"{element}\" : {Upf}\"." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "Structure to be used for calculation." + }, + { + "name": "abs_atom_marker", + "required": false, + "valid_types": "Str", + "info": "The name for the Kind representing the absorbing atom in the structure. Will be used in all structures generated in ``get_xspectra_structures`` step." + }, + { + "name": "calc_binding_energy", + "required": false, + "valid_types": "Bool", + "info": "If `True`, run scf calculation for the supercell." + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculations will be cleaned at the end of execution." + }, + { + "name": "core_hole_treatments", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Optional dictionary to set core-hole treatment to all elements present. The default full-core-hole treatment will be used if not specified." + }, + { + "name": "correction_energies", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Optional dictionary to set the correction energy to all elements present. " + }, + { + "name": "dry_run", + "required": false, + "valid_types": "Bool, NoneType", + "info": "Terminate workchain steps before submitting calculations (test purposes only)." + }, + { + "name": "elements_list", + "required": false, + "valid_types": "List, NoneType", + "info": "The list of elements to be considered for analysis, each must be valid elements of the periodic table." 
+ }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "relax", + "required": false, + "valid_types": "Data", + "info": "Input parameters for the relax process. If not specified at all, the relaxation step is skipped." + }, + { + "name": "spglib_settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Optional settings dictionary for the spglib call within ``get_xspectra_structures``." + }, + { + "name": "structure_preparation_settings", + "required": false, + "valid_types": "Dict, Float, Int, Bool, Str", + "info": "Optional settings dictionary for the ``get_xspectra_structures()`` method." + }, + { + "name": "voight_gamma", + "required": false, + "valid_types": "Float", + "info": "The gamma parameter for the Lorenzian broadening in the Voight method." + }, + { + "name": "voight_sigma", + "required": false, + "valid_types": "Float", + "info": "The sigma parameter for the gaussian broadening in the Voight method." + } + ], + "outputs": [ + { + "name": "binding_energies", + "required": true, + "valid_types": "Dict", + "info": "All the binding energy values for each element calculated by the WorkChain." + }, + { + "name": "chemical_shifts", + "required": true, + "valid_types": "Dict", + "info": "All the chemical shift values for each element calculated by the WorkChain." + }, + { + "name": "final_spectra_be", + "required": true, + "valid_types": "XyData", + "info": "The fully-resolved spectra for each element based on binding energy." + }, + { + "name": "final_spectra_cls", + "required": true, + "valid_types": "XyData", + "info": "The fully-resolved spectra for each element based on chemical shift." + }, + { + "name": "output_parameters_ch_scf", + "required": true, + "valid_types": "Dict", + "info": "The output parameters of each ``PwBaseWorkChain`` performed``." + }, + { + "name": "supercell_structure", + "required": true, + "valid_types": "StructureData", + "info": "The supercell of ``outputs.standardized_structure`` used to generate structures for XPS sub-processes." + }, + { + "name": "symmetry_analysis_data", + "required": true, + "valid_types": "Dict", + "info": "The output parameters from ``get_xspectra_structures()``." + }, + { + "name": "optimized_structure", + "required": false, + "valid_types": "StructureData", + "info": "The optimized structure from the ``relax`` process." + }, + { + "name": "output_parameters_relax", + "required": false, + "valid_types": "Dict", + "info": "The output_parameters of the relax step." + }, + { + "name": "output_parameters_scf", + "required": false, + "valid_types": "Dict", + "info": "The output_parameters of the scf step." + }, + { + "name": "standardized_structure", + "required": false, + "valid_types": "StructureData", + "info": "The standardized crystal structure used to generate structures for XPS sub-processes." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." 
+ }, + { + "status": 401, + "message": "The Relax sub process failed" + }, + { + "status": 402, + "message": "The SCF Pw sub processes failed" + }, + { + "status": 402, + "message": "One or more CH_SCF Pw sub processes failed" + } + ] + }, + "class": "aiida_quantumespresso.workflows.xps:XpsWorkChain" + }, + "quantumespresso.xspectra.base": { + "description": [ + "Workchain to run a Quantum ESPRESSO xspectra.x calculation with automated error handling and restarts." + ], + "spec": { + "inputs": [ + { + "name": "xspectra", + "required": true, + "valid_types": "Data", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Mapping where keys are process handler names and the values are a dictionary, where each dictionary can define the ``enabled`` and ``priority`` key, which can be used to toggle the values set on the original process handler declaration." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "An explicit k-points mesh. Either this or `kpoints_distance` has to be provided." + }, + { + "name": "kpoints_distance", + "required": false, + "valid_types": "Float, NoneType", + "info": "The minimum desired distance in 1/\u212b between k-points in reciprocal space. The explicit k-points will be generated automatically by a calculation function based on the input structure." + }, + { + "name": "kpoints_force_parity", + "required": false, + "valid_types": "Bool, NoneType", + "info": "Optional input when constructing the k-points based on a desired `kpoints_distance`. Setting this to `True` will force the k-point mesh to have an even number of points along each lattice vector except for any non-periodic directions." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "spectra", + "required": true, + "valid_types": "XyData", + "info": "" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." 
+ }, + { + "status": 202, + "message": "Neither the `kpoints` nor the `kpoints_distance` input was specified." + }, + { + "status": 300, + "message": "The calculation failed with an unrecoverable error." + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." + } + ] + }, + "class": "aiida_quantumespresso.workflows.xspectra.base:XspectraBaseWorkChain" + }, + "quantumespresso.xspectra.core": { + "description": [ + "Workchain to compute X-ray absorption spectra for a given structure using Quantum ESPRESSO.", + "", + " The workflow follows the process required to compute the XAS of an input structure: an SCF calculation is performed", + " using the provided structure, which is then followed by the calculation of the XAS itself by XSpectra. The", + " calculations performed by the WorkChain in a typical run will be:", + "", + " - PwSCF calculation with pw.x of the input structure with a core-hole present.", + " - Generation of core-wavefunction data with upf2plotcore.sh (if requested).", + " - XAS calculation with xspectra.x to compute the Lanczos coefficients and print the XANES spectra for the", + " polarisation vectors requested in the input.", + " - Collation of output data from pw.x and xspectra.x calculations, including a combination of XANES dipole spectra", + " based on polarisation vectors to represent the powder spectrum of the structure (if requested).", + "", + " If ``run_replot = True`` is set in the inputs (defaults to False), the WorkChain will run a second xspectra.x", + " calculation which replots the spectra produced from the ``xs_prod`` step. This option can be very useful for", + " obtaining a final spectrum at low levels of broadening (relative to the default of 0.5 eV), particularly as higher", + " levels of broadening significantly speed up the convergence of the Lanczos procedure. Inputs for the replot", + " calculation are found in the ``xs_plot`` namespace.", + "", + " The core-wavefunction plot derived from the ground-state of the absorbing element can be provided as a top-level", + " input or produced by the WorkChain. If left to the WorkChain, the ground-state pseudopotential assigned to the", + " absorbing element will be used to generate this data using the upf2plotcore.sh utility script (via the", + " ``aiida-shell`` plugin).", + "", + " In its current stage of development, the workflow requires the following:", + "", + " - An input structure where the desired absorbing atom in the system is marked as a separate Kind. The default", + " behaviour for the WorkChain is to set the Kind name as 'X', however this can be changed via the `overrides`", + " dictionary.", + " - A code node for ``upf2plotcore``, configured for the ``aiida-shell`` plugin", + " (https://github.com/sphuber/aiida-shell). Alternatively, a ``SinglefileData`` node from a previous ``ShellJob``", + " run can be supplied under ``inputs.core_wfc_data``.", + " - A suitable pair of pseudopotentials for the element type of the absorbing atom, one for the ground-state occupancy", + " which contains GIPAW informtation for the core level of interest for the XAS (e.g. 1s in the case of a K-edge", + " calculation) and the other containing a core hole. 
(For the moment this can be passed either via the", + " ``core_hole_pseudos`` field in ``get_builder_from_protocol`` or via the overrides, but will be changed later once", + " full families of core-hole pseudopotentials become available)." + ], + "spec": { + "inputs": [ + { + "name": "eps_vectors", + "required": true, + "valid_types": "List", + "info": "The list of 3-vectors to use in XSpectra sub-processes. The number of sub-lists will subsequently define the number of XSpectra calculations to perform" + }, + { + "name": "scf", + "required": true, + "valid_types": "Data", + "info": "Input parameters for the `pw.x` calculation." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "Structure to be used for calculation, with at least one site containing the `abs_atom_marker` as the kind label." + }, + { + "name": "xs_prod", + "required": true, + "valid_types": "Data", + "info": "Input parameters for the `xspectra.x` calculation to compute the Lanczos." + }, + { + "name": "abs_atom_marker", + "required": false, + "valid_types": "Str, NoneType", + "info": "The name for the Kind representing the absorbing atom in the structure. Must corespond to a Kind within the StructureData node supplied to the calculation." + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation will be cleaned at the end of execution." + }, + { + "name": "core_wfc_data", + "required": false, + "valid_types": "SinglefileData, NoneType", + "info": "The core wavefunction data file extracted from the ground-state pseudo for the absorbing atom." + }, + { + "name": "dry_run", + "required": false, + "valid_types": "Bool, NoneType", + "info": "Terminate workchain steps before submitting calculations (test purposes only)." + }, + { + "name": "get_powder_spectrum", + "required": false, + "valid_types": "Bool", + "info": "If `True`, the WorkChain will combine XANES dipole spectra computed using the XAS basis vectors defined according to the `get_powder_spectrum` CalcFunction." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "run_replot", + "required": false, + "valid_types": "Bool", + "info": "" + }, + { + "name": "upf2plotcore_code", + "required": false, + "valid_types": "Code, NoneType", + "info": "The code node required for upf2plotcore.sh configured for ``aiida-shell``. Must be provided if `core_wfc_data` is not provided." + }, + { + "name": "xs_plot", + "required": false, + "valid_types": "Data", + "info": "Input parameters for the re-plot `xspectra.x` calculation of the Lanczos." + } + ], + "outputs": [ + { + "name": "parameters_scf", + "required": true, + "valid_types": "Dict", + "info": "The output parameters of the SCF `PwBaseWorkChain`." + }, + { + "name": "parameters_xspectra", + "required": true, + "valid_types": "Dict", + "info": "The output dictionaries of each `XspectraBaseWorkChain` performed" + }, + { + "name": "spectra", + "required": true, + "valid_types": "XyData", + "info": "An XyData node containing all the final spectra produced by the WorkChain." + }, + { + "name": "powder_spectrum", + "required": false, + "valid_types": "XyData", + "info": "The simulated powder spectrum" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." 
+ }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 401, + "message": "The SCF sub process failed" + }, + { + "status": 402, + "message": "One or more XSpectra sub processes failed" + }, + { + "status": 403, + "message": "The pseudo for the absorbing element contains no GIPAW information." + } + ] + }, + "class": "aiida_quantumespresso.workflows.xspectra.core:XspectraCoreWorkChain" + }, + "quantumespresso.xspectra.crystal": { + "description": [ + "Workchain to compute all X-ray absorption spectra for a given structure using Quantum ESPRESSO.", + "", + " The WorkChain follows the process required to compute all the K-edge XAS spectra for each", + " element in a given structure. The WorkChain itself firstly calls the PwRelaxWorkChain to", + " relax the input structure, then determines the input settings for each XAS", + " calculation automatically using ``get_xspectra_structures()``:", + "", + " - Firstly the input structure is converted to its conventional standard cell using", + " ``spglib`` and detects the space group number for the conventional cell.", + " - Symmetry analysis of the standardized structure using ``spglib`` is then used to", + " determine the number of non-equivalent atomic sites in the structure for each", + " element considered for analysis.", + "", + " Using the symmetry data returned from ``get_xspectra_structures``, input structures for", + " the XspectraCoreWorkChain are generated from the standardized structure by converting each", + " to a supercell with cell dimensions of at least 8.0 angstroms in each periodic dimension -", + " required in order to sufficiently reduce the unphysical interaction of the core-hole with", + " neighbouring images. The size of the minimum size requirement can be overriden by the", + " user if required. The WorkChain then uses the space group number to set the list of", + " polarisation vectors for the ``XspectraCoreWorkChain`` to compute for all subsequent", + " calculations." + ], + "spec": { + "inputs": [ + { + "name": "core", + "required": true, + "valid_types": "Data", + "info": "Input parameters for the basic xspectra workflow (core-hole SCF + XAS." + }, + { + "name": "core_hole_pseudos", + "required": true, + "valid_types": "UpfData, UpfData", + "info": "Dynamic namespace for pairs of excited-state pseudopotentials for each absorbing element. Must use the mapping \"{element}\" : {Upf}\"." + }, + { + "name": "elements_list", + "required": true, + "valid_types": "List", + "info": "The list of elements to be considered for analysis, each must be a valid element of the periodic table." + }, + { + "name": "gipaw_pseudos", + "required": true, + "valid_types": "UpfData, UpfData", + "info": "Dynamic namespace for pairs of ground-state pseudopotentials for each absorbing element. Must use the mapping \"{element}\" : {Upf}." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "Structure to be used for calculation." + }, + { + "name": "abs_atom_marker", + "required": false, + "valid_types": "Str", + "info": "The name for the Kind representing the absorbing atom in the structure. Will be used in all structures generated in ``get_xspectra_structures`` step." + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculations will be cleaned at the end of execution." 
+ }, + { + "name": "core_hole_treatments", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Optional dictionary to set core-hole treatment to given elements present. The default full-core-hole treatment will be used if not specified." + }, + { + "name": "core_wfc_data", + "required": false, + "valid_types": "SinglefileData", + "info": "Input namespace to provide core wavefunction inputs for each element. Must follow the format: ``core_wfc_data__{symbol} = {node}``" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "relax", + "required": false, + "valid_types": "Data", + "info": "Input parameters for the relax process. If not specified at all, the relaxation step is skipped." + }, + { + "name": "return_all_powder_spectra", + "required": false, + "valid_types": "Bool", + "info": "If ``True``, the WorkChain will return all ``powder_spectrum`` nodes from each ``XspectraCoreWorkChain`` sub-process." + }, + { + "name": "spglib_settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Optional settings dictionary for the spglib call within ``get_xspectra_structures``." + }, + { + "name": "structure_preparation_settings", + "required": false, + "valid_types": "Dict, Float, Int, Bool, Str", + "info": "Optional settings dictionary for the ``get_xspectra_structures()`` method." + }, + { + "name": "upf2plotcore_code", + "required": false, + "valid_types": "Code, NoneType", + "info": "Code node for the upf2plotcore.sh ShellJob code." + } + ], + "outputs": [ + { + "name": "final_spectra", + "required": true, + "valid_types": "XyData", + "info": "The fully-resolved spectra for each element" + }, + { + "name": "supercell_structure", + "required": true, + "valid_types": "StructureData", + "info": "The supercell of ``outputs.standardized_structure`` used to generate structures for XSpectra sub-processes." + }, + { + "name": "symmetry_analysis_data", + "required": true, + "valid_types": "Dict", + "info": "The output parameters from ``get_xspectra_structures()``." + }, + { + "name": "optimized_structure", + "required": false, + "valid_types": "StructureData", + "info": "The optimized structure from the ``relax`` process." + }, + { + "name": "parameters_relax", + "required": false, + "valid_types": "Dict", + "info": "The output_parameters of the relax step." + }, + { + "name": "parameters_scf", + "required": false, + "valid_types": "Dict", + "info": "The output parameters of each ``PwBaseWorkChain`` performed in each ``XspectraCoreWorkChain``." + }, + { + "name": "parameters_xspectra", + "required": false, + "valid_types": "Dict", + "info": "The output dictionaries of each `XspectraCalculation` performed" + }, + { + "name": "powder_spectra", + "required": false, + "valid_types": "XyData", + "info": "All the spectra generated by the WorkChain." + }, + { + "name": "standardized_structure", + "required": false, + "valid_types": "StructureData", + "info": "The standardized crystal structure used to generate structures for XSpectra sub-processes." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." 
+ }, + { + "status": 401, + "message": "The Relax sub process failed" + }, + { + "status": 402, + "message": "One or more XSpectra workflows failed" + }, + { + "status": 403, + "message": "The pseudos for one or more absorbing elements contain no GIPAW information." + } + ] + }, + "class": "aiida_quantumespresso.workflows.xspectra.crystal:XspectraCrystalWorkChain" + } + }, + "console_scripts": { + "aiida-quantumespresso": "aiida_quantumespresso.cli:cmd_root" + } + }, + "commits_count": 93, + "development_status": "stable", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 20 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 13 + }, + { + "colorclass": "red", + "text": "Data", + "count": 2 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 11 + }, + { + "colorclass": "purple", + "text": "Console scripts", + "count": 1 + }, + { + "colorclass": "orange", + "text": "Other (Tools calculations, Tools data orbitals)", + "count": 3 + } + ], + "pip_install_cmd": "pip install aiida-quantumespresso", + "is_installable": "True" + }, + "aiida-quantumespresso-hp": { + "code_home": "https://github.com/sphuber/aiida-quantumespresso-hp", + "entry_point_prefix": "quantumespresso.hp", + "pip_url": "git+https://github.com/sphuber/aiida-quantumespresso-hp", + "name": "aiida-quantumespresso-hp", + "package_name": "aiida_quantumespresso_hp", + "hosted_on": "github.com", + "metadata": { + "author": "Sebastiaan P. Huber", + "author_email": "mail@sphuber.net", + "version": "0.1.0", + "description": "The AiiDA plugin for the Hubbard module of Quantum ESPRESSO", + "classifiers": [ + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 2.7", + "Development Status :: 4 - Beta" + ] + }, + "aiida_version": ">=1.0.0b6,<2.0", + "entry_points": { + "aiida.calculations": { + "quantumespresso.hp": "aiida_quantumespresso_hp.calculations.hp:HpCalculation" + }, + "aiida.parsers": { + "quantumespresso.hp": "aiida_quantumespresso_hp.parsers.hp:HpParser" + }, + "aiida.workflows": { + "quantumespresso.hp.main": "aiida_quantumespresso_hp.workflows.hp.main:HpWorkChain", + "quantumespresso.hp.parallelize_atoms": "aiida_quantumespresso_hp.workflows.hp.parallelize_atoms:HpParallelizeAtomsWorkChain", + "quantumespresso.hp.base": "aiida_quantumespresso_hp.workflows.hp.base:HpBaseWorkChain", + "quantumespresso.hp.hubbard": "aiida_quantumespresso_hp.workflows.hubbard:SelfConsistentHubbardWorkChain" + }, + "console_scripts": { + "launch_calculation_hp": "aiida_quantumespresso_hp.cli.calculations.hp:launch", + "launch_workflow_hp_base": "aiida_quantumespresso_hp.cli.workflows.hp.base:launch", + "launch_workflow_hp_main": "aiida_quantumespresso_hp.cli.workflows.hp.main:launch", + "launch_workflow_hp_hubbard": "aiida_quantumespresso_hp.cli.workflows.hubbard:launch" + } + }, + "commits_count": 0, + "development_status": "beta", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 4 + }, + { + "colorclass": "purple", + "text": "Console scripts", + "count": 4 + } + ], + "pip_install_cmd": "pip install git+https://github.com/sphuber/aiida-quantumespresso-hp", + "is_installable": "True" + }, + "aiida-raspa": { + "code_home": "https://github.com/yakutovicha/aiida-raspa", + "entry_point_prefix": "raspa", + "pip_url": "aiida-raspa", + "name": "aiida-raspa", + "package_name": "aiida_raspa", + 
"hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin for RASPA code", + "author": "Aliaksandr Yakutovich", + "author_email": "aliaksandr.yakutovich@epfl.ch", + "license": "MIT License", + "home_page": "https://github.com/yakutovicha/aiida-raspa", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3" + ], + "version": "1.2.0" + }, + "aiida_version": null, + "entry_points": { + "aiida.calculations": { + "raspa": { + "description": [ + "This is a RaspaCalculation, subclass of CalcJob, to prepare input for RASPA code.", + " For information on RASPA, refer to: https://github.com/iraspa/raspa2." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "Input parameters" + }, + { + "name": "block_pocket", + "required": false, + "valid_types": "SinglefileData", + "info": "Zeo++ block pocket file" + }, + { + "name": "file", + "required": false, + "valid_types": "SinglefileData", + "info": "Additional input file(s)" + }, + { + "name": "framework", + "required": false, + "valid_types": "CifData", + "info": "Input framework(s)" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "parent_folder", + "required": false, + "valid_types": "RemoteData", + "info": "Remote folder used to continue the same simulation stating from the binary restarts." + }, + { + "name": "retrieved_parent_folder", + "required": false, + "valid_types": "FolderData", + "info": "To use an old calculation as a starting poing for a new one." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict", + "info": "Additional input parameters" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "The results of a calculation" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "warnings", + "required": false, + "valid_types": "List", + "info": "Warnings that appeared during the calculation" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The retrieved folder data node could not be accessed." + }, + { + "status": 101, + "message": "The retrieved folder does not contain an output file." + }, + { + "status": 102, + "message": "The output does not contain \"Starting simulation\"." 
+ }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 500, + "message": "The calculation could not be completed due to the lack of time." + } + ] + }, + "class": "aiida_raspa.calculations:RaspaCalculation" + } + }, + "aiida.parsers": { + "raspa": "aiida_raspa.parsers:RaspaParser" + }, + "aiida.workflows": { + "raspa.base": { + "description": [ + "Workchain to run a RASPA calculation with automated error handling and restarts." + ], + "spec": { + "inputs": [ + { + "name": "raspa", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict", + "info": "Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "Maximum number of iterations the work chain will restart the process to finish successfully." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "The results of a calculation" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "warnings", + "required": false, + "valid_types": "List", + "info": "Warnings that appeared during the calculation" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." 
+ } + ] + }, + "class": "aiida_raspa.workchains:RaspaBaseWorkChain" + } + } + }, + "commits_count": 0, + "development_status": "stable", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-raspa", + "is_installable": "True" + }, + "aiida-shell": { + "code_home": "https://github.com/sphuber/aiida-shell", + "entry_point_prefix": "core", + "pip_url": "aiida-shell", + "plugin_info": "https://raw.github.com/sphuber/aiida-shell/master/pyproject.toml", + "name": "aiida-shell", + "package_name": "aiida_shell", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin that makes running shell commands easy.", + "author_email": "\"Sebastiaan P. Huber\" ", + "classifiers": [ + "Development Status :: 3 - Alpha", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering" + ], + "version": "0.5.3" + }, + "aiida_version": ">=2.1,<3.0", + "entry_points": { + "aiida.calculations": { + "core.shell": { + "description": [ + "Implementation of :class:`aiida.engine.CalcJob` to run a simple shell command." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "arguments", + "required": false, + "valid_types": "List, NoneType", + "info": "" + }, + { + "name": "filenames", + "required": false, + "valid_types": "Dict, NoneType", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "nodes", + "required": false, + "valid_types": "Data", + "info": "" + }, + { + "name": "outputs", + "required": false, + "valid_types": "List, NoneType", + "info": "" + }, + { + "name": "parser", + "required": false, + "valid_types": "PickledData, NoneType", + "info": "" + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." 
+ }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 300, + "message": "Exit status could not be determined: exit status file was not retrieved." + }, + { + "status": 301, + "message": "Exit status could not be determined: exit status file does not contain a valid integer." + }, + { + "status": 302, + "message": "The stdout file was not retrieved." + }, + { + "status": 303, + "message": "One or more output files defined in the `outputs` input were not retrieved: {missing_filepaths}." + }, + { + "status": 310, + "message": "Callable specified in the `parser` input excepted: {exception}." + }, + { + "status": 400, + "message": "The command exited with a non-zero status: {status} {stderr}." + }, + { + "status": 410, + "message": "The command exited with a zero status but the stderr was not empty." + } + ] + }, + "class": "aiida_shell.calculations.shell:ShellJob" + } + }, + "aiida.data": { + "core.code.installed.shell": "aiida_shell.data.code:ShellCode", + "core.pickled": "aiida_shell.data.pickled:PickledData" + }, + "aiida.parsers": { + "core.shell": "aiida_shell.parsers.shell:ShellParser" + } + }, + "commits_count": 46, + "development_status": "alpha", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "red", + "text": "Data", + "count": 2 + } + ], + "pip_install_cmd": "pip install aiida-shell", + "is_installable": "True" + }, + "aiida-siesta": { + "code_home": "https://github.com/siesta-project/aiida_siesta_plugin/tree/master", + "documentation_url": "https://aiida-siesta-plugin.readthedocs.io/", + "entry_point_prefix": "siesta", + "pip_url": "aiida-siesta", + "name": "aiida-siesta", + "package_name": "aiida_siesta", + "hosted_on": "github.com", + "metadata": { + "description": "A plugin for Siesta's basic functionality within the AiiDA framework.", + "author_email": "Albero Garcia , \"Victor M. 
Garcia-Suarez\" , Emanuele Bosoni , Vladimir Dikan , Pol Febrer ", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9" + ], + "version": "2.0.0" + }, + "aiida_version": ">=2.0.0,<3.0.0", + "entry_points": {}, + "commits_count": 8, + "development_status": "stable", + "summaryinfo": [], + "pip_install_cmd": "pip install aiida-siesta", + "is_installable": "True" + }, + "aiida-spex": { + "code_home": "https://github.com/JuDFTteam/aiida-spex", + "entry_point_prefix": "spex", + "pip_url": "git+https://github.com/JuDFTteam/aiida-spex", + "name": "aiida-spex", + "package_name": "aiida_spex", + "hosted_on": "github.com", + "metadata": { + "author": "The SPEX Team", + "author_email": "a.chandran@fz-juelich.de", + "version": "1.1.2", + "description": "AiiDA plugin for SPEX code", + "classifiers": [ + "Programming Language :: Python", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Framework :: AiiDA" + ] + }, + "aiida_version": ">=1.0.0b3,<3.0.0", + "entry_points": { + "aiida.calculations": { + "spex.spex": "aiida_spex.calculations.spex:SpexCalculation" + }, + "aiida.data": { + "spex.spexinp": "aiida_spex.data.spexinp:SpexinpData" + }, + "aiida.parsers": { + "spex.spexparser": "aiida_spex.parsers.spex:SpexParser" + }, + "aiida.workflows": { + "spex.job": "aiida_spex.workflows.job:SpexJobWorkchain" + } + }, + "commits_count": 0, + "development_status": "planning", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "red", + "text": "Data", + "count": 1 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/JuDFTteam/aiida-spex" + }, + "aiida-spirit": { + "code_home": "https://github.com/JuDFTteam/aiida-spirit/tree/main", + "documentation_url": "https://aiida-spirit.readthedocs.io/", + "entry_point_prefix": "spirit", + "name": "aiida-spirit", + "pip_url": "aiida-spirit", + "package_name": "aiida_spirit", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin for the spirit code", + "author": "The JuDFT Team", + "author_email": "p.ruessmann@fz-juelich.de", + "license": "MIT", + "home_page": "https://github.com/JuDFTteam/aiida-spirit", + "classifiers": [ + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python" + ], + "version": "0.2.2" + }, + "aiida_version": null, + "entry_points": { + "aiida.calculations": { + "spirit": "aiida_spirit.calculations:SpiritCalculation" + }, + "aiida.parsers": { + "spirit": "aiida_spirit.parsers:SpiritParser" + } + }, + "commits_count": 9, + "development_status": "planning", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-spirit" + }, + "aiida-sshonly": { + "code_home": "https://github.com/adegomme/aiida-sshonly", + "development_status": "beta", + "entry_point_prefix": "sshonly", 
+ "pip_url": "aiida-sshonly", + "plugin_info": "https://raw.github.com/adegomme/aiida-sshonly/master/setup.json", + "name": "aiida-sshonly", + "package_name": "aiida_sshonly", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin adding a sshonly transport option, using only SSH to transfer files, avoiding SFTP, in case it's blocked or non functional on a remote system", + "author": "adegomme", + "license": "MIT", + "home_page": "https://github.com/adegomme/aiida-sshonly", + "classifiers": [ + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python" + ], + "version": "0.1.0a3" + }, + "aiida_version": ">=1.3.0,<2.0.0", + "entry_points": { + "aiida.transports": { + "ssh_only": "aiida_sshonly.transports.sshonly:SshOnlyTransport" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "orange", + "text": "Other (Transports)", + "count": 1 + } + ], + "pip_install_cmd": "pip install --pre aiida-sshonly", + "is_installable": "True" + }, + "aiida-statefile-schedulers": { + "code_home": "https://github.com/dev-zero/aiida-statefile-schedulers", + "development_status": "beta", + "entry_point_prefix": "statefile_schedulers", + "pip_url": "aiida-statefile-schedulers", + "name": "aiida-statefile-schedulers", + "package_name": "aiida_statefile_schedulers", + "hosted_on": "github.com", + "metadata": { + "description": "Simple statefile-driven task schedulers for AiiDA", + "author": "Tiziano M\u00fcller", + "author_email": "tm@dev-zero.ch", + "license": "MIT", + "home_page": "https://github.com/dev-zero/aiida-statefile-schedulers", + "classifiers": [ + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python" + ], + "version": "0.2.1" + }, + "aiida_version": null, + "entry_points": { + "aiida.schedulers": { + "statefile_schedulers.direct": "aiida_statefile_schedulers.schedulers.direct:StatefileDirectScheduler" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "orange", + "text": "Other (Schedulers)", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-statefile-schedulers", + "is_installable": "True" + }, + "aiida-strain": { + "code_home": "https://github.com/greschd/aiida-strain", + "documentation_url": "https://aiida-strain.readthedocs.io", + "entry_point_prefix": "strain", + "pip_url": "aiida-strain", + "name": "aiida-strain", + "package_name": "aiida_strain", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA Plugin for applying strain to structures", + "author": "Dominik Gresch", + "author_email": "greschd@gmx.ch", + "license": "Apache 2.0", + "home_page": "https://aiida-strain.readthedocs.io", + "classifiers": [ + "Development Status :: 3 - Alpha", + "Environment :: Plugins", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Topic :: Scientific/Engineering :: Physics" + ], + "version": "0.2.0" + }, + "aiida_version": null, + "entry_points": { + "aiida.workflows": { + "strain.apply_strains": { + "description": [ + "Workchain to create strained structures from a given input structure." 
+ ], + "spec": { + "inputs": [ + { + "name": "strain_kind", + "required": true, + "valid_types": "Str", + "info": "" + }, + { + "name": "strain_parameters", + "required": true, + "valid_types": "Str", + "info": "" + }, + { + "name": "strain_strengths", + "required": true, + "valid_types": "List", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_strain:ApplyStrains" + }, + "strain.apply_strains_with_symmetry": { + "description": [ + "Workchain to create strained structures from an input structure, and select the symmetries which are compatible with the strained structure from a set of given input symmetries." + ], + "spec": { + "inputs": [ + { + "name": "strain_kind", + "required": true, + "valid_types": "Str", + "info": "" + }, + { + "name": "strain_parameters", + "required": true, + "valid_types": "Str", + "info": "" + }, + { + "name": "strain_strengths", + "required": true, + "valid_types": "List", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "" + }, + { + "name": "symmetries", + "required": true, + "valid_types": "SinglefileData", + "info": "" + }, + { + "name": "symmetry_repr_code", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." 
+ } + ] + }, + "class": "aiida_strain:ApplyStrainsWithSymmetry" + } + } + }, + "commits_count": 0, + "development_status": "alpha", + "summaryinfo": [ + { + "colorclass": "green", + "text": "Workflows", + "count": 2 + } + ], + "pip_install_cmd": "pip install aiida-strain", + "is_installable": "True" + }, + "aiida-supercell": { + "code_home": "https://github.com/pzarabadip/aiida-supercell", + "development_status": "stable", + "documentation_url": "https://aiida-supercell.readthedocs.io/", + "entry_point_prefix": "supercell", + "pip_url": "git+https://github.com/pzarabadip/aiida-supercell", + "name": "aiida-supercell", + "package_name": "aiida_supercell", + "hosted_on": "github.com", + "metadata": { + "author": "Pezhman Zarabadi-Poor", + "author_email": "pzarabadip@gmail.com", + "version": "1.0.1", + "description": "AiiDA Plugin for Supercell program", + "classifiers": [ + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9" + ] + }, + "aiida_version": ">=1.0.0,<2.0", + "entry_points": { + "aiida.calculations": { + "supercell": { + "description": [ + "This is a SupercellCalculation, subclass of JobCalculation,", + " to prepare input for enumerating structures using Supercell program" + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData, SinglefileData", + "info": "Input structure" + }, + { + "name": "supercell_size", + "required": true, + "valid_types": "List", + "info": "Supercell size for enumeration" + }, + { + "name": "calculate_coulomb_energies", + "required": false, + "valid_types": "Bool", + "info": "Whether to calculate Coulomb energies" + }, + { + "name": "charge_balance_method", + "required": false, + "valid_types": "Str", + "info": "Method to use for charge balancing" + }, + { + "name": "charges", + "required": false, + "valid_types": "Dict", + "info": "Dictionary of formal charges to be used" + }, + { + "name": "merge_symmetric", + "required": false, + "valid_types": "Bool", + "info": "Whether to merge symmetrically distinct configurations" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "random_seed", + "required": false, + "valid_types": "Int", + "info": "Random seed number" + }, + { + "name": "sample_structures", + "required": false, + "valid_types": "Dict", + "info": "How to sample structures from huge configuration space" + }, + { + "name": "save_as_archive", + "required": false, + "valid_types": "Bool", + "info": "Whether to save resulting structures as archive" + }, + { + "name": "tolerance", + "required": false, + "valid_types": "Float", + "info": "The maximum distance (in Angstroms) between sites that should be contained within the same group." + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "the results of the calculation" + }, + { + "name": "output_structures", + "required": true, + "valid_types": "StructureData", + "info": "relaxed structure" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The retrieved folder data node could not be accessed." + }, + { + "status": 101, + "message": "Input structure could not be processed." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + } + ] + }, + "class": "aiida_supercell.calculations:SupercellCalculation" + } + }, + "aiida.parsers": { + "supercell": "aiida_supercell.parsers:SupercellParser" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/pzarabadip/aiida-supercell", + "is_installable": "True" + }, + "aiida-symmetry-representation": { + "code_home": "https://github.com/greschd/aiida_symmetry_representation", + "documentation_url": "https://aiida-symmetry-representation.readthedocs.io", + "entry_point_prefix": "symmetry_representation", + "pip_url": "aiida-symmetry-representation", + "name": "aiida-symmetry-representation", + "package_name": "aiida_symmetry_representation", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA Plugin for symmetry representations.", + "author": "Dominik Gresch", + "author_email": "greschd@gmx.ch", + "license": "Apache 2.0", + "home_page": "https://aiida-symmetry-representation.readthedocs.io", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Environment :: Plugins", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Topic :: Scientific/Engineering :: Physics" + ], + "version": "0.2.0" + }, + "aiida_version": null, + "entry_points": { + "aiida.calculations": { + "symmetry_representation.filter_symmetries": { + "description": [ + "Calculation class to run the ``symmetry-repr filter_symmetries`` command." + ], + "spec": { + "inputs": [ + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "Structure with which the filtered symmetries should be compatible." + }, + { + "name": "symmetries", + "required": true, + "valid_types": "SinglefileData", + "info": "File containing the symmetries (in HDF5 format)." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." 
+ }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "symmetries", + "required": true, + "valid_types": "SinglefileData", + "info": "The HDF5 file containing the symmetries which are compatible with the structure." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." 
+ }, + { + "status": 150, + "message": "{message}" + } + ] + }, + "class": "aiida_symmetry_representation.calculations.filter_symmetries:FilterSymmetriesCalculation" + } + }, + "aiida.parsers": { + "symmetry_representation.symmetry": "aiida_symmetry_representation.parsers.symmetries:SymmetriesParser" + } + }, + "commits_count": 0, + "development_status": "stable", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-symmetry-representation", + "is_installable": "True" + }, + "aiida-tbextraction": { + "code_home": "https://github.com/greschd/aiida-tbextraction", + "documentation_url": "https://aiida-tbextraction.readthedocs.io/", + "entry_point_prefix": "tbextraction", + "pip_url": "aiida-tbextraction", + "name": "aiida-tbextraction", + "package_name": "aiida_tbextraction", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA Plugin for extracting tight-binding models", + "author": "Dominik Gresch", + "author_email": "greschd@gmx.ch", + "license": "Apache 2.0", + "home_page": "https://aiida-tbextraction.readthedocs.io", + "classifiers": [ + "Development Status :: 4 - Beta", + "Environment :: Plugins", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Topic :: Scientific/Engineering :: Physics" + ], + "version": "0.2.0b1" + }, + "aiida_version": null, + "entry_points": { + "aiida.workflows": { + "tbextraction.fp_run.base": "aiida_tbextraction.fp_run:FirstPrinciplesRunBase", + "tbextraction.fp_run.reference_bands.base": "aiida_tbextraction.fp_run.reference_bands:ReferenceBandsBase", + "tbextraction.fp_run.wannier_input.base": "aiida_tbextraction.fp_run.wannier_input:WannierInputBase", + "tbextraction.calculate_tb": "aiida_tbextraction.calculate_tb:TightBindingCalculation", + "tbextraction.model_evaluation.base": "aiida_tbextraction.model_evaluation:ModelEvaluationBase", + "tbextraction.model_evaluation.band_difference": "aiida_tbextraction.model_evaluation:BandDifferenceModelEvaluation", + "tbextraction.energy_windows.run_window": "aiida_tbextraction.energy_windows.run_window:RunWindow", + "tbextraction.energy_windows.window_search": "aiida_tbextraction.energy_windows.window_search:WindowSearch", + "tbextraction.optimize_fp_tb": "aiida_tbextraction.optimize_fp_tb:OptimizeFirstPrinciplesTightBinding", + "tbextraction.optimize_strained_fp_tb": "aiida_tbextraction.optimize_strained_fp_tb:OptimizeStrainedFirstPrinciplesTightBinding" + } + }, + "commits_count": 0, + "development_status": "beta", + "summaryinfo": [ + { + "colorclass": "green", + "text": "Workflows", + "count": 10 + } + ], + "pip_install_cmd": "pip install --pre aiida-tbextraction", + "is_installable": "True" + }, + "aiida-tbmodels": { + "code_home": "https://github.com/greschd/aiida-tbmodels", + "documentation_url": "https://aiida-tbmodels.readthedocs.io", + "entry_point_prefix": "tbmodels", + "pip_url": "aiida-tbmodels", + "name": "aiida-tbmodels", + "package_name": "aiida_tbmodels", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA Plugin for running TBmodels", + "author": "Dominik Gresch", + "author_email": "greschd@gmx.ch", + "license": "Apache 2.0", + "home_page": "https://aiida-tbmodels.readthedocs.io", + "classifiers": [ + "Development Status 
:: 3 - Alpha", + "Environment :: Plugins", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Topic :: Scientific/Engineering :: Physics" + ], + "version": "0.3.0" + }, + "aiida_version": null, + "entry_points": { + "aiida.calculations": { + "tbmodels.eigenvals": { + "description": [ + "Calculation class for the 'tbmodels eigenvals' command, which computes the eigenvalues from a given tight-binding model." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "kpoints", + "required": true, + "valid_types": "KpointsData", + "info": "Kpoints for which the eigenvalues are calculated." + }, + { + "name": "tb_model", + "required": true, + "valid_types": "SinglefileData", + "info": "Input model in TBmodels HDF5 format." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "bands", + "required": true, + "valid_types": "BandsData", + "info": "The calculated eigenvalues of the model at given k-points." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 300, + "message": "The result HDF5 file was not found." + }, + { + "status": 301, + "message": "The standard error file contains an unknown TBmodels exception." + } + ] + }, + "class": "aiida_tbmodels.calculations.eigenvals:EigenvalsCalculation" + }, + "tbmodels.parse": { + "description": [ + "Calculation plugin for the 'tbmodels parse' command, which creates a", + " TBmodels tight-binding model from the Wannier90 output." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "wannier_folder", + "required": true, + "valid_types": "FolderData", + "info": "Folder containing the Wannier90 output data." + }, + { + "name": "distance_ratio_threshold", + "required": false, + "valid_types": "Float", + "info": "Determines the minimum ratio between nearest and next-nearest atom when parsing with 'nearest_atom' mode." 
+ }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "pos_kind", + "required": false, + "valid_types": "Str", + "info": "Determines how the orbital positions are parsed." + }, + { + "name": "sparsity", + "required": false, + "valid_types": "Str", + "info": "Set the sparsity of the output model. Requires TBmodels version >=1.4." + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "tb_model", + "required": true, + "valid_types": "SinglefileData", + "info": "Output model in TBmodels HDF5 format." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 300, + "message": "The output model HDF5 file was not found." + }, + { + "status": 301, + "message": "The standard error file contains an unknown TBmodels exception." + }, + { + "status": 301, + "message": "The seedname_wsvec.dat file is empty or incomplete." + }, + { + "status": 401, + "message": "The nearest atom to use for position parsing is ambiguous." + } + ] + }, + "class": "aiida_tbmodels.calculations.parse:ParseCalculation" + }, + "tbmodels.slice": { + "description": [ + "Calculation plugin for the 'tbmodels slice' command, which re-orders or slices orbitals of a tight-binding model." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "slice_idx", + "required": true, + "valid_types": "List", + "info": "Indices of the orbitals which are sliced from the model." + }, + { + "name": "tb_model", + "required": true, + "valid_types": "SinglefileData", + "info": "Input model in TBmodels HDF5 format." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "sparsity", + "required": false, + "valid_types": "Str", + "info": "Set the sparsity of the output model. Requires TBmodels version >=1.4." + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. 
By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "tb_model", + "required": true, + "valid_types": "SinglefileData", + "info": "Output model in TBmodels HDF5 format." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 300, + "message": "The output model HDF5 file was not found." + }, + { + "status": 301, + "message": "The standard error file contains an unknown TBmodels exception." + } + ] + }, + "class": "aiida_tbmodels.calculations.slice:SliceCalculation" + }, + "tbmodels.symmetrize": { + "description": [ + "Calculation class for the 'tbmodels symmetrize' command, which creates a symmetrized tight-binding model from a tight-binding model and symmetry representations." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "symmetries", + "required": true, + "valid_types": "SinglefileData", + "info": "File containing the symmetries in HDF5 format." + }, + { + "name": "tb_model", + "required": true, + "valid_types": "SinglefileData", + "info": "Input model in TBmodels HDF5 format." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "sparsity", + "required": false, + "valid_types": "Str", + "info": "Set the sparsity of the output model. Requires TBmodels version >=1.4." + } + ], + "outputs": [ + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "tb_model", + "required": true, + "valid_types": "SinglefileData", + "info": "Output model in TBmodels HDF5 format." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." 
+ }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 300, + "message": "The output model HDF5 file was not found." + }, + { + "status": 301, + "message": "The standard error file contains an unknown TBmodels exception." + }, + { + "status": 301, + "message": "The type of the given symmetries object is incorrect." + } + ] + }, + "class": "aiida_tbmodels.calculations.symmetrize:SymmetrizeCalculation" + } + }, + "aiida.parsers": { + "tbmodels.model": "aiida_tbmodels.parsers.model:ModelParser" + } + }, + "commits_count": 0, + "development_status": "alpha", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 4 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-tbmodels", + "is_installable": "True" + }, + "aiida-tcod": { + "code_home": "https://github.com/aiidateam/aiida-tcod", + "development_status": "beta", + "entry_point_prefix": "tcod", + "pip_url": "git+https://github.com/aiidateam/aiida-tcod", + "name": "aiida-tcod", + "package_name": "aiida_tcod", + "hosted_on": "github.com", + "metadata": { + "author": "The AiiDA team", + "author_email": "developers@aiida.net", + "version": "0.1.0a0", + "description": "AiiDA plugin to interact with the TCOD", + "classifiers": [ + "Programming Language :: Python" + ] + }, + "aiida_version": ">=1.0.0b1", + "entry_points": { + "aiida.tools.dbexporters": { + "tcod": "aiida.tools.dbexporters.tcod" + } + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "orange", + "text": "Other (Database Exporters)", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/aiidateam/aiida-tcod", + "is_installable": "True" + }, + "aiida-uppasd": { + "code_home": "https://github.com/uppasd/aiida-uppasd", + "documentation_url": "https://github.com/uppasd/aiida-uppasd/blob/master/README.md", + "entry_point_prefix": "uppasd", + "pip_url": "git+https://github.com/unkcpz/aiida-uppasd", + "name": "aiida-uppasd", + "package_name": "aiida_uppasd", + "hosted_on": "github.com", + "metadata": { + "author": "Qichen Xu, Anders Bergman, Anna Delin, Jonathan Chico", + "author_email": "qichenx@kth.se", + "version": "0.1.0", + "description": "Interface for UppASD and AiiDA", + "classifiers": [ + "Programming Language :: Python", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Framework :: AiiDA" + ] + }, + "aiida_version": ">=1.1.0,<2.0.0", + "entry_points": { + "aiida.calculations": { + "UppASD_core_calculations": "UppASD_AiiDA.calculations.core_calcs:UppASD" + }, + "aiida.parsers": { + "UppASD_core_parsers": "UppASD_AiiDA.parsers.core_parser:SpinDynamic_core_parser" + } + }, + "commits_count": 0, + "development_status": "planning", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + } + ], + "pip_install_cmd": "pip install git+https://github.com/unkcpz/aiida-uppasd" + }, + "aiida-vasp": { + "code_home": "https://github.com/aiida-vasp/aiida-vasp", + "documentation_url": "https://aiida-vasp.readthedocs.io/", + "entry_point_prefix": "vasp", + "pip_url": "aiida-vasp", + "plugin_info": "https://raw.githubusercontent.com/aiida-vasp/aiida-vasp/master/setup.json", + "name": "aiida-vasp", + "package_name": "aiida_vasp", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA 
plugin for running VASP calculations and workflows.", + "author_email": "Espen Flage-Larsen ", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Environment :: Plugins", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering :: Chemistry", + "Topic :: Scientific/Engineering :: Physics" + ], + "version": "3.0.1" + }, + "aiida_version": ">=2.4,<3.0", + "entry_points": { + "aiida.calculations": { + "vasp.immigrant": { + "description": [ + "Parse VASP output objects stored in a specified directory.", + "", + " Simulate running the VaspCalculation up to the point where it can be", + " retrieved and parsed, then hand over control to the runner for the rest.", + "", + " Usage examples", + " --------------", + " Immigrant calculation can be perfomed as follows.", + "", + " ::", + "", + " code = Code.get_from_string('vasp@local')", + " folder = '/home/username/vasp-calc-dir'", + " settings = {'parser_settings': {'add_energies': True,", + " 'add_forces': True,", + " 'electronic_step_energies': True}}", + " VaspImmigrant = CalculationFactory('vasp.immigrant')", + " builder = VaspImmigrant.get_builder_from_folder(code,", + " folder,", + " settings=settings)", + " submit(builder)", + "", + " Instead of ``builder``, inputs dict is obtained similarly as", + "", + " ::", + "", + " code = Code.get_from_string('vasp@local')", + " folder = '/home/username/vasp-calc-dir'", + " settings = {'parser_settings': {'add_energies': True,", + " 'add_forces': True,", + " 'electronic_step_energies': True}}", + " VaspImmigrant = CalculationFactory('vasp.immigrant')", + " inputs = VaspImmigrant.get_inputs_from_folder(code,", + " folder,", + " settings=settings)", + " submit(VaspImmigrant, **inputs)", + "", + " Note", + " ----", + " The defaul metadata is set automatically as follows::", + "", + " {'options': {'max_wallclock_seconds': 1,", + " 'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 1}}}", + "", + " Specific scheduler may require setting ``resources`` differently", + " (e.g., sge ``'parallel_env'``).", + "", + " ``get_inputs_from_folder`` and ``get_builder_from_folder`` accept several", + " kwargs, see the docstring of ``get_inputs_from_folder``." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "kpoints", + "required": true, + "valid_types": "KpointsData", + "info": "The kpoints to use (KPOINTS)." + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "The VASP input parameters (INCAR)." + }, + { + "name": "potential", + "required": true, + "valid_types": "PotcarData", + "info": "The potentials (POTCAR)." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData, CifData", + "info": "The input structure (POSCAR)." + }, + { + "name": "charge_density", + "required": false, + "valid_types": "ChargedensityData", + "info": "The charge density. (CHGCAR)" + }, + { + "name": "dynamics", + "required": false, + "valid_types": "Dict", + "info": "The VASP parameters related to ionic dynamics, e.g. 
flags to set the selective dynamics" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "remote_workdir", + "required": false, + "valid_types": "str", + "info": "" + }, + { + "name": "restart_folder", + "required": false, + "valid_types": "RemoteData", + "info": "A remote folder to restart from if need be" + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict", + "info": "Additional parameters not related to VASP itself." + }, + { + "name": "wavefunctions", + "required": false, + "valid_types": "WavefunData", + "info": "The wave function coefficients. (WAVECAR)" + } + ], + "outputs": [ + { + "name": "custom_outputs", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "misc", + "required": true, + "valid_types": "Dict", + "info": "The output parameters containing smaller quantities that do not depend on system size." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "bands", + "required": false, + "valid_types": "BandsData", + "info": "The output band structure." + }, + { + "name": "born_charges", + "required": false, + "valid_types": "ArrayData", + "info": "The output Born effective charges." + }, + { + "name": "charge_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output charge density." + }, + { + "name": "chgcar", + "required": false, + "valid_types": "ChargedensityData", + "info": "The output charge density CHGCAR file." + }, + { + "name": "dielectrics", + "required": false, + "valid_types": "ArrayData", + "info": "The output dielectric functions." + }, + { + "name": "dos", + "required": false, + "valid_types": "ArrayData", + "info": "The output dos." + }, + { + "name": "dynmat", + "required": false, + "valid_types": "ArrayData", + "info": "The output dynamical matrix." + }, + { + "name": "energies", + "required": false, + "valid_types": "ArrayData", + "info": "The output total energies." + }, + { + "name": "forces", + "required": false, + "valid_types": "ArrayData", + "info": "The output forces." + }, + { + "name": "hessian", + "required": false, + "valid_types": "ArrayData", + "info": "The output Hessian matrix." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "The output k-points." + }, + { + "name": "magnetization_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output magnetization density." + }, + { + "name": "projectors", + "required": false, + "valid_types": "ArrayData", + "info": "The output projectors of decomposition." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "site_magnetization", + "required": false, + "valid_types": "Dict", + "info": "The output of the site magnetization" + }, + { + "name": "stress", + "required": false, + "valid_types": "ArrayData", + "info": "The output stress." 
+ }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "The output structure." + }, + { + "name": "trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "The output trajectory data." + }, + { + "name": "wavecar", + "required": false, + "valid_types": "WavefunData", + "info": "The output plane wave coefficients file." + } + ], + "exit_codes": [ + { + "status": 0, + "message": "the sun is shining" + }, + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 333, + "message": "VASP did not produce any output and did likely not execute properly." + }, + { + "status": 350, + "message": "the retrieved folder data node could not be accessed." + }, + { + "status": 351, + "message": "the retrieved_temporary folder data node could not be accessed." + }, + { + "status": 352, + "message": "an object that is marked by the parser as critical is missing." + }, + { + "status": 700, + "message": "Calculation did not reach the end of execution." + }, + { + "status": 701, + "message": "The electronic structure is not converged." + }, + { + "status": 702, + "message": "The ionic relaxation is not converged." + }, + { + "status": 703, + "message": "VASP calculation encountered a critical error: {error_message}." + }, + { + "status": 704, + "message": "Outputs for diagnosis are missing, please make sure `run_status` and `notifications` quantities are requested for parsing." + }, + { + "status": 1001, + "message": "parsing an object has failed." + }, + { + "status": 1002, + "message": "the parser is not able to parse the {quantity} quantity" + }, + { + "status": 1003, + "message": "the vasprun.xml was truncated and recovery parsing failed to parse at least one of the requested quantities: {quantities}, very likely the VASP calculation did not run properly" + }, + { + "status": 1004, + "message": "the parser is not able to compose one or more output nodes: {nodes}" + }, + { + "status": 1005, + "message": "Overflow detected in XML while parsing." + } + ] + }, + "class": "aiida_vasp.calcs.immigrant:VaspImmigrant" + }, + "vasp.neb": { + "description": [ + "NEB calculations using VASP", + "", + " ------------------------------------", + " Calculations for performing NEB calculations.", + " NEB calculations requires standard VASP inputs, but POSCAR are placed in", + " folder names 00, 01, 02... N for N-1 number of images.", + "", + " Input frames should be placed under the ``neb_images`` input namespace as a dictionary like::", + " {", + " 'image_00': structure_1,", + " 'image_01': structure_2", + " ....", + " }", + "", + " Output of individual frames are placed in the corresponding namespace under the same convention." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "final_structure", + "required": true, + "valid_types": "StructureData, CifData", + "info": "The input structure (POSCAR) for the final image." 
+ }, + { + "name": "initial_structure", + "required": true, + "valid_types": "StructureData, CifData", + "info": "The input structure (POSCAR) for initial image." + }, + { + "name": "kpoints", + "required": true, + "valid_types": "KpointsData", + "info": "The kpoints to use (KPOINTS)." + }, + { + "name": "neb_images", + "required": true, + "valid_types": "StructureData, CifData", + "info": "Starting structure for the NEB images" + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "The VASP input parameters (INCAR)." + }, + { + "name": "potential", + "required": true, + "valid_types": "PotcarData", + "info": "The potentials (POTCAR)." + }, + { + "name": "charge_density", + "required": false, + "valid_types": "ChargedensityData", + "info": "The charge density. (CHGCAR)" + }, + { + "name": "dynamics", + "required": false, + "valid_types": "Dict", + "info": "The VASP parameters related to ionic dynamics, e.g. flags to set the selective dynamics" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "restart_folder", + "required": false, + "valid_types": "RemoteData", + "info": "A remote folder to restart from if need be" + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict", + "info": "Additional parameters not related to VASP itself." + }, + { + "name": "wavefunctions", + "required": false, + "valid_types": "WavefunData", + "info": "The wave function coefficients. (WAVECAR)" + } + ], + "outputs": [ + { + "name": "custom_outputs", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "misc", + "required": true, + "valid_types": "Dict", + "info": "Per-image misc output." + }, + { + "name": "neb_misc", + "required": true, + "valid_types": "Dict", + "info": "NEB related data combined for each image" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "NEB images" + }, + { + "name": "bands", + "required": false, + "valid_types": "BandsData", + "info": "The output band structure." + }, + { + "name": "born_charges", + "required": false, + "valid_types": "ArrayData", + "info": "The output Born effective charges." + }, + { + "name": "charge_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output charge density." + }, + { + "name": "chgcar", + "required": false, + "valid_types": "ChargedensityData", + "info": "The output charge density." + }, + { + "name": "dielectrics", + "required": false, + "valid_types": "ArrayData", + "info": "The output dielectric functions." + }, + { + "name": "dos", + "required": false, + "valid_types": "ArrayData", + "info": "The output dos." + }, + { + "name": "dynmat", + "required": false, + "valid_types": "ArrayData", + "info": "The output dynamical matrix." + }, + { + "name": "energies", + "required": false, + "valid_types": "ArrayData", + "info": "The output total energies." + }, + { + "name": "forces", + "required": false, + "valid_types": "ArrayData", + "info": "The output forces." 
+ }, + { + "name": "hessian", + "required": false, + "valid_types": "ArrayData", + "info": "The output Hessian matrix." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "Kpoints for each image." + }, + { + "name": "magnetization_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output magnetization density." + }, + { + "name": "projectors", + "required": false, + "valid_types": "ArrayData", + "info": "The output projectors of decomposition." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "site_magnetization", + "required": false, + "valid_types": "Dict", + "info": "The output of the site magnetization for each image." + }, + { + "name": "stress", + "required": false, + "valid_types": "ArrayData", + "info": "The output stress." + }, + { + "name": "trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "The output trajectory data." + }, + { + "name": "wavecar", + "required": false, + "valid_types": "WavefunData", + "info": "The output file containing the plane wave coefficients." + } + ], + "exit_codes": [ + { + "status": 0, + "message": "the sun is shining" + }, + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 333, + "message": "VASP did not produce any output files and did likely not execute properly." + }, + { + "status": 350, + "message": "the retrieved folder data node could not be accessed." + }, + { + "status": 351, + "message": "the retrieved_temporary folder data node could not be accessed." + }, + { + "status": 352, + "message": "an object that is marked by the parser as critical is missing." + }, + { + "status": 352, + "message": "a file that is marked by the parser as critical is missing." + }, + { + "status": 700, + "message": "Calculation did not reach the end of execution." + }, + { + "status": 701, + "message": "The electronic structure is not converged." + }, + { + "status": 702, + "message": "The ionic relaxation is not converged." + }, + { + "status": 703, + "message": "VASP calculation encountered a critical error: {error_message}." + }, + { + "status": 704, + "message": "Outputs for diagnosis are missing, please make sure the `neb_data` and `run_status` quantities are requested for parsing." + }, + { + "status": 1001, + "message": "parsing an object has failed." + }, + { + "status": 1001, + "message": "parsing a file has failed." + }, + { + "status": 1002, + "message": "the parser is not able to parse the {quantity} quantity" + }, + { + "status": 1003, + "message": "the vasprun.xml was truncated and recovery parsing failed to parse at least one of the requested quantities: {quantities}, very likely the VASP calculation did not run properly" + }, + { + "status": 1004, + "message": "the parser is not able to compose one or more output nodes: {nodes}" + }, + { + "status": 1005, + "message": "Overflow detected in XML while parsing." 
+ } + ] + }, + "class": "aiida_vasp.calcs.neb:VaspNEBCalculation" + }, + "vasp.vasp": { + "description": [ + "General-purpose VASP calculation.", + "", + " ---------------------------------", + " By default retrieves only the 'OUTCAR', 'vasprun.xml', 'EIGENVAL', 'DOSCAR'", + " and Wannier90 input / output objects. These objects are deleted after parsing.", + " Additional retrieve objects can be specified via the", + " ``settings['ADDITIONAL_RETRIEVE_TEMPORARY_LIST']`` input. In addition, if you want to keep", + " any objects after parsing, put them in ``settings['ADDITIONAL_RETRIEVE_LIST']`` which is empty", + " by default.", + "", + " Floating point precision for writing POSCAR objects can be adjusted using", + " ``settings['poscar_precision']``, default: 10", + "", + " The following assumes you are familiar with the AiiDA data structures and", + " how to set up and run an AiiDA calculation in general.", + "", + " Example usage::", + "", + " from aiida.orm import CalculationFactory, DataFactory", + " from aiida.work import submit", + "", + " proc = CalculationFactory('vasp.vasp').process()", + " inputs = proc.get_inputs_template()", + " inputs.parameter = ", + " inputs.structure = ", + " inputs.kpoints = ", + " inputs.settings = ", + " inputs.potential = DataFactory('vasp.potcar').get_potcars_from_structure(structure, ...)", + " inputs.code = ", + "", + " submit(proc, **inputs)", + "", + " Which is very similar to the workchain example.", + "", + " Since we do not want the content parsers to know about the AiiDA infrastructure,", + " i.e. processes etc. we have no access to the exit codes defined on the CalcJob.", + " We thus have to deal with failures in parsing directly in the write calls here." + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "kpoints", + "required": true, + "valid_types": "KpointsData", + "info": "The kpoints to use (KPOINTS)." + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "The VASP input parameters (INCAR)." + }, + { + "name": "potential", + "required": true, + "valid_types": "PotcarData", + "info": "The potentials (POTCAR)." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData, CifData", + "info": "The input structure (POSCAR)." + }, + { + "name": "charge_density", + "required": false, + "valid_types": "ChargedensityData", + "info": "The charge density. (CHGCAR)" + }, + { + "name": "dynamics", + "required": false, + "valid_types": "Dict", + "info": "The VASP parameters related to ionic dynamics, e.g. flags to set the selective dynamics" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "restart_folder", + "required": false, + "valid_types": "RemoteData", + "info": "A remote folder to restart from if need be" + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict", + "info": "Additional parameters not related to VASP itself." + }, + { + "name": "wavefunctions", + "required": false, + "valid_types": "WavefunData", + "info": "The wave function coefficients. (WAVECAR)" + } + ], + "outputs": [ + { + "name": "custom_outputs", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "misc", + "required": true, + "valid_types": "Dict", + "info": "The output parameters containing smaller quantities that do not depend on system size." 
+ }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "bands", + "required": false, + "valid_types": "BandsData", + "info": "The output band structure." + }, + { + "name": "born_charges", + "required": false, + "valid_types": "ArrayData", + "info": "The output Born effective charges." + }, + { + "name": "charge_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output charge density." + }, + { + "name": "chgcar", + "required": false, + "valid_types": "ChargedensityData", + "info": "The output charge density CHGCAR file." + }, + { + "name": "dielectrics", + "required": false, + "valid_types": "ArrayData", + "info": "The output dielectric functions." + }, + { + "name": "dos", + "required": false, + "valid_types": "ArrayData", + "info": "The output dos." + }, + { + "name": "dynmat", + "required": false, + "valid_types": "ArrayData", + "info": "The output dynamical matrix." + }, + { + "name": "energies", + "required": false, + "valid_types": "ArrayData", + "info": "The output total energies." + }, + { + "name": "forces", + "required": false, + "valid_types": "ArrayData", + "info": "The output forces." + }, + { + "name": "hessian", + "required": false, + "valid_types": "ArrayData", + "info": "The output Hessian matrix." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "The output k-points." + }, + { + "name": "magnetization_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output magnetization density." + }, + { + "name": "projectors", + "required": false, + "valid_types": "ArrayData", + "info": "The output projectors of decomposition." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "site_magnetization", + "required": false, + "valid_types": "Dict", + "info": "The output of the site magnetization" + }, + { + "name": "stress", + "required": false, + "valid_types": "ArrayData", + "info": "The output stress." + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "The output structure." + }, + { + "name": "trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "The output trajectory data." + }, + { + "name": "wavecar", + "required": false, + "valid_types": "WavefunData", + "info": "The output plane wave coefficients file." + } + ], + "exit_codes": [ + { + "status": 0, + "message": "the sun is shining" + }, + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." 
+ }, + { + "status": 333, + "message": "VASP did not produce any output and did likely not execute properly." + }, + { + "status": 350, + "message": "the retrieved folder data node could not be accessed." + }, + { + "status": 351, + "message": "the retrieved_temporary folder data node could not be accessed." + }, + { + "status": 352, + "message": "an object that is marked by the parser as critical is missing." + }, + { + "status": 700, + "message": "Calculation did not reach the end of execution." + }, + { + "status": 701, + "message": "The electronic structure is not converged." + }, + { + "status": 702, + "message": "The ionic relaxation is not converged." + }, + { + "status": 703, + "message": "VASP calculation encountered a critical error: {error_message}." + }, + { + "status": 704, + "message": "Outputs for diagnosis are missing, please make sure `run_status` and `notifications` quantities are requested for parsing." + }, + { + "status": 1001, + "message": "parsing an object has failed." + }, + { + "status": 1002, + "message": "the parser is not able to parse the {quantity} quantity" + }, + { + "status": 1003, + "message": "the vasprun.xml was truncated and recovery parsing failed to parse at least one of the requested quantities: {quantities}, very likely the VASP calculation did not run properly" + }, + { + "status": 1004, + "message": "the parser is not able to compose one or more output nodes: {nodes}" + }, + { + "status": 1005, + "message": "Overflow detected in XML while parsing." + } + ] + }, + "class": "aiida_vasp.calcs.vasp:VaspCalculation" + }, + "vasp.vasp2w90": "aiida_vasp.calcs.vasp2w90:Vasp2w90Calculation" + }, + "aiida.cmdline.data": { + "vasp-potcar": "aiida_vasp.commands.potcar:potcar" + }, + "aiida.data": { + "vasp.archive": "aiida_vasp.data.archive:ArchiveData", + "vasp.chargedensity": "aiida_vasp.data.chargedensity:ChargedensityData", + "vasp.potcar": "aiida_vasp.data.potcar:PotcarData", + "vasp.potcar_file": "aiida_vasp.data.potcar:PotcarFileData", + "vasp.wavefun": "aiida_vasp.data.wavefun:WavefunData" + }, + "aiida.groups": { + "vasp.potcar": "aiida_vasp.data.potcar:PotcarGroup" + }, + "aiida.parsers": { + "vasp.neb": "aiida_vasp.parsers.neb:VtstNebParser", + "vasp.vasp": "aiida_vasp.parsers.vasp:VaspParser", + "vasp.vasp2w90": "aiida_vasp.parsers.vasp2w90:Vasp2w90Parser" + }, + "aiida.workflows": { + "vasp.bands": { + "description": [ + "Extract the band structure using k-point paths fetched from SeeKpath." 
+ ], + "spec": { + "inputs": [ + { + "name": "bands", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "dynamics", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "options", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "potential_family", + "required": true, + "valid_types": "Str", + "info": "" + }, + { + "name": "potential_mapping", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "restart_folder", + "required": true, + "valid_types": "RemoteData", + "info": "\n The folder to restart in, which contains the outputs from the prerun to extract the charge density.\n " + }, + { + "name": "smearing", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData, CifData", + "info": "" + }, + { + "name": "chgcar", + "required": false, + "valid_types": "ChargedensityData", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "\n If True, clean the work dir upon the completion of a successfull calculation.\n " + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict", + "info": "Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "\n The maximum number of iterations to perform.\n " + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "parameters", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "site_magnetization", + "required": false, + "valid_types": "Dict", + "info": "Site magnetization to be used as MAGMOM" + }, + { + "name": "verbose", + "required": false, + "valid_types": "Bool", + "info": "\n If True, enable more detailed output during workchain execution.\n " + }, + { + "name": "wavecar", + "required": false, + "valid_types": "WavefunData", + "info": "" + } + ], + "outputs": [ + { + "name": "bands", + "required": true, + "valid_types": "BandsData", + "info": "" + }, + { + "name": "custom_outputs", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "misc", + "required": true, + "valid_types": "Dict", + "info": "The output parameters containing smaller quantities that do not depend on system size." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "born_charges", + "required": false, + "valid_types": "ArrayData", + "info": "The output Born effective charges." + }, + { + "name": "charge_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output charge density." 
+ }, + { + "name": "chgcar", + "required": false, + "valid_types": "ChargedensityData", + "info": "The output charge density CHGCAR file." + }, + { + "name": "dielectrics", + "required": false, + "valid_types": "ArrayData", + "info": "The output dielectric functions." + }, + { + "name": "dos", + "required": false, + "valid_types": "ArrayData", + "info": "The output dos." + }, + { + "name": "dynmat", + "required": false, + "valid_types": "ArrayData", + "info": "The output dynamical matrix." + }, + { + "name": "energies", + "required": false, + "valid_types": "ArrayData", + "info": "The output total energies." + }, + { + "name": "forces", + "required": false, + "valid_types": "ArrayData", + "info": "The output forces." + }, + { + "name": "hessian", + "required": false, + "valid_types": "ArrayData", + "info": "The output Hessian matrix." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "The output k-points." + }, + { + "name": "magnetization_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output magnetization density." + }, + { + "name": "projectors", + "required": false, + "valid_types": "ArrayData", + "info": "The output projectors of decomposition." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "site_magnetization", + "required": false, + "valid_types": "Dict", + "info": "The output of the site magnetization" + }, + { + "name": "stress", + "required": false, + "valid_types": "ArrayData", + "info": "The output stress." + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "The output structure." + }, + { + "name": "trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "The output trajectory data." + }, + { + "name": "wavecar", + "required": false, + "valid_types": "WavefunData", + "info": "The output plane wave coefficients file." + } + ], + "exit_codes": [ + { + "status": 0, + "message": "the sun is shining" + }, + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 420, + "message": "no called workchain detected" + }, + { + "status": 500, + "message": "unknown error detected in the bands workchain" + }, + { + "status": 2001, + "message": "BandsData not found in exposed_ouputs" + } + ] + }, + "class": "aiida_vasp.workchains.bands:BandsWorkChain" + }, + "vasp.converge": { + "description": [ + "A workchain to perform convergence tests." 
+ ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "converge", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "dynamics", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "options", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "potential_family", + "required": true, + "valid_types": "Str", + "info": "" + }, + { + "name": "potential_mapping", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData, CifData", + "info": "" + }, + { + "name": "verify", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "chgcar", + "required": false, + "valid_types": "ChargedensityData", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "\n If True, clean the work dir upon the completion of a successfull calculation.\n " + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict", + "info": "Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "" + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "\n The maximum number of iterations to perform.\n " + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "relax", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "restart_folder", + "required": false, + "valid_types": "RemoteData", + "info": "\n The restart folder from a previous workchain run that is going to be used.\n " + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "site_magnetization", + "required": false, + "valid_types": "Dict", + "info": "Site magnetization to be used as MAGMOM" + }, + { + "name": "verbose", + "required": false, + "valid_types": "Bool", + "info": "\n If True, enable more detailed output during workchain execution.\n " + }, + { + "name": "wavecar", + "required": false, + "valid_types": "WavefunData", + "info": "" + } + ], + "outputs": [ + { + "name": "converge", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "custom_outputs", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "misc", + "required": true, + "valid_types": "Dict", + "info": "The output parameters containing smaller quantities that do not depend on system size." + }, + { + "name": "relax", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." 
+ }, + { + "name": "bands", + "required": false, + "valid_types": "BandsData", + "info": "The output band structure." + }, + { + "name": "born_charges", + "required": false, + "valid_types": "ArrayData", + "info": "The output Born effective charges." + }, + { + "name": "charge_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output charge density." + }, + { + "name": "chgcar", + "required": false, + "valid_types": "ChargedensityData", + "info": "The output charge density CHGCAR file." + }, + { + "name": "dielectrics", + "required": false, + "valid_types": "ArrayData", + "info": "The output dielectric functions." + }, + { + "name": "dos", + "required": false, + "valid_types": "ArrayData", + "info": "The output dos." + }, + { + "name": "dynmat", + "required": false, + "valid_types": "ArrayData", + "info": "The output dynamical matrix." + }, + { + "name": "energies", + "required": false, + "valid_types": "ArrayData", + "info": "The output total energies." + }, + { + "name": "forces", + "required": false, + "valid_types": "ArrayData", + "info": "The output forces." + }, + { + "name": "hessian", + "required": false, + "valid_types": "ArrayData", + "info": "The output Hessian matrix." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "The output k-points." + }, + { + "name": "magnetization_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output magnetization density." + }, + { + "name": "projectors", + "required": false, + "valid_types": "ArrayData", + "info": "The output projectors of decomposition." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "site_magnetization", + "required": false, + "valid_types": "Dict", + "info": "The output of the site magnetization" + }, + { + "name": "stress", + "required": false, + "valid_types": "ArrayData", + "info": "The output stress." + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "The output structure." + }, + { + "name": "trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "The output trajectory data." + }, + { + "name": "wavecar", + "required": false, + "valid_types": "WavefunData", + "info": "The output plane wave coefficients file." + } + ], + "exit_codes": [ + { + "status": 0, + "message": "the sun is shining" + }, + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 420, + "message": "no called workchain detected" + }, + { + "status": 500, + "message": "unknown error detected in the converge workchain" + } + ] + }, + "class": "aiida_vasp.workchains.converge:ConvergeWorkChain" + }, + "vasp.immigrant": { + "description": [ + "Import a VASP run executed in the directory specified by folder_path." 
+ ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "\n If True, clean the work dir upon the completion of a successfull calculation.\n " + }, + { + "name": "folder_path", + "required": false, + "valid_types": "Str", + "info": "Deprecated." + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict", + "info": "Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "\n The maximum number of iterations to perform.\n " + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "options", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "potential_family", + "required": false, + "valid_types": "Str", + "info": "" + }, + { + "name": "potential_mapping", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "remote_workdir", + "required": false, + "valid_types": "str", + "info": "" + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "use_chgcar", + "required": false, + "valid_types": "Bool", + "info": "\n If True, WavefunData (of WAVECAR) is attached.\n " + }, + { + "name": "use_wavecar", + "required": false, + "valid_types": "Bool", + "info": "\n If True, WavefunData (of WAVECAR) is attached.\n " + }, + { + "name": "verbose", + "required": false, + "valid_types": "Bool", + "info": "\n If True, enable more detailed output during workchain execution.\n " + } + ], + "outputs": [ + { + "name": "custom_outputs", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "misc", + "required": true, + "valid_types": "Dict", + "info": "The output parameters containing smaller quantities that do not depend on system size." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "bands", + "required": false, + "valid_types": "BandsData", + "info": "The output band structure." + }, + { + "name": "born_charges", + "required": false, + "valid_types": "ArrayData", + "info": "The output Born effective charges." + }, + { + "name": "charge_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output charge density." + }, + { + "name": "chgcar", + "required": false, + "valid_types": "ChargedensityData", + "info": "The output charge density CHGCAR file." + }, + { + "name": "dielectrics", + "required": false, + "valid_types": "ArrayData", + "info": "The output dielectric functions." + }, + { + "name": "dos", + "required": false, + "valid_types": "ArrayData", + "info": "The output dos." + }, + { + "name": "dynmat", + "required": false, + "valid_types": "ArrayData", + "info": "The output dynamical matrix." 
+ }, + { + "name": "energies", + "required": false, + "valid_types": "ArrayData", + "info": "The output total energies." + }, + { + "name": "forces", + "required": false, + "valid_types": "ArrayData", + "info": "The output forces." + }, + { + "name": "hessian", + "required": false, + "valid_types": "ArrayData", + "info": "The output Hessian matrix." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "The output k-points." + }, + { + "name": "magnetization_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output magnetization density." + }, + { + "name": "projectors", + "required": false, + "valid_types": "ArrayData", + "info": "The output projectors of decomposition." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "site_magnetization", + "required": false, + "valid_types": "Dict", + "info": "The output of the site magnetization" + }, + { + "name": "stress", + "required": false, + "valid_types": "ArrayData", + "info": "The output stress." + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "The output structure." + }, + { + "name": "trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "The output trajectory data." + }, + { + "name": "wavecar", + "required": false, + "valid_types": "WavefunData", + "info": "The output plane wave coefficients file." + } + ], + "exit_codes": [ + { + "status": 0, + "message": "the sun is shining" + }, + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." + } + ] + }, + "class": "aiida_vasp.workchains.immigrant:VaspImmigrantWorkChain" + }, + "vasp.master": { + "description": [ + "The master workchain that selects sub workchains to perform necessary calculations." 
+ ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "converge", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "dos", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "dynamics", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "options", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "potential_family", + "required": true, + "valid_types": "Str", + "info": "" + }, + { + "name": "potential_mapping", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData, CifData", + "info": "" + }, + { + "name": "verify", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "chgcar", + "required": false, + "valid_types": "ChargedensityData", + "info": "" + }, + { + "name": "extract_bands", + "required": false, + "valid_types": "Bool", + "info": "\n Do you want to extract the band structure?\n " + }, + { + "name": "extract_dos", + "required": false, + "valid_types": "Bool", + "info": "\n Do you want to extract the density of states?\n " + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict", + "info": "Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "" + }, + { + "name": "kpoints_distance", + "required": false, + "valid_types": "Float", + "info": "\n The maximum distance between k-points in inverse AA.\n " + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "\n The maximum number of iterations to perform.\n " + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "relax", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "restart_folder", + "required": false, + "valid_types": "RemoteData", + "info": "\n The restart folder from a previous workchain run that is going to be used.\n " + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "site_magnetization", + "required": false, + "valid_types": "Dict", + "info": "Site magnetization to be used as MAGMOM" + }, + { + "name": "verbose", + "required": false, + "valid_types": "Bool", + "info": "\n If True, enable more detailed output during workchain execution.\n " + }, + { + "name": "wavecar", + "required": false, + "valid_types": "WavefunData", + "info": "" + } + ], + "outputs": [ + { + "name": "bands", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "dos", + "required": false, + "valid_types": "", + "info": "" + } + ], + "exit_codes": [ + { + "status": 0, + "message": "the sun is shining" + }, + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." 
+ },
+ {
+ "status": 420,
+ "message": "no called workchain detected"
+ },
+ {
+ "status": 500,
+ "message": "unknown error detected in the master workchain"
+ }
+ ]
+ },
+ "class": "aiida_vasp.workchains.master:MasterWorkChain"
+ },
+ "vasp.neb": {
+ "description": [
+ "The NEB workchain.",
+ "",
+ " -------------------",
+ " Error handling enriched wrapper around VaspNEBCalculation.",
+ "",
+ " Deliberately conserves most of the interface (required inputs) of the VaspNEBCalculation class, but",
+ " makes it possible for a user to interact with a workchain and not a calculation.",
+ "",
+ " In addition, restarts of the calculation are implemented for error handling when the calculation is not fully converged."
+ ],
+ "spec": {
+ "inputs": [
+ {
+ "name": "code",
+ "required": true,
+ "valid_types": "Code",
+ "info": "The `Code` to use for this job."
+ },
+ {
+ "name": "dynamics",
+ "required": true,
+ "valid_types": "",
+ "info": ""
+ },
+ {
+ "name": "final_structure",
+ "required": true,
+ "valid_types": "StructureData, CifData",
+ "info": "The input structure (POSCAR) for the final image."
+ },
+ {
+ "name": "initial_structure",
+ "required": true,
+ "valid_types": "StructureData, CifData",
+ "info": "The input structure (POSCAR) for the initial image."
+ },
+ {
+ "name": "neb_images",
+ "required": true,
+ "valid_types": "StructureData, CifData",
+ "info": "Starting structure for the NEB images"
+ },
+ {
+ "name": "options",
+ "required": true,
+ "valid_types": "Dict",
+ "info": ""
+ },
+ {
+ "name": "parameters",
+ "required": true,
+ "valid_types": "Dict",
+ "info": "The VASP input parameters (INCAR)."
+ },
+ {
+ "name": "potential_family",
+ "required": true,
+ "valid_types": "Str",
+ "info": ""
+ },
+ {
+ "name": "potential_mapping",
+ "required": true,
+ "valid_types": "Dict",
+ "info": ""
+ },
+ {
+ "name": "charge_density",
+ "required": false,
+ "valid_types": "ChargedensityData",
+ "info": "The charge density. (CHGCAR)"
+ },
+ {
+ "name": "clean_workdir",
+ "required": false,
+ "valid_types": "Bool",
+ "info": "\n If True, clean the work dir upon the completion of a successful calculation.\n "
+ },
+ {
+ "name": "handler_overrides",
+ "required": false,
+ "valid_types": "Dict",
+ "info": "Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated."
+ }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "" + }, + { + "name": "kpoints_spacing", + "required": false, + "valid_types": "Float", + "info": "Spacing for the kpoints in units A^-1 * 2pi (CASTEP style `kpoints_mp_spacing`)" + }, + { + "name": "kpoints_spacing_vasp", + "required": false, + "valid_types": "Float", + "info": "Spacing for the kpoints in units A^-1 (VASP style)" + }, + { + "name": "ldau_mapping", + "required": false, + "valid_types": "Dict", + "info": "Mappings, see the doc string of 'get_ldau_keys'" + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "\n The maximum number of iterations to perform.\n " + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "restart_folder", + "required": false, + "valid_types": "RemoteData", + "info": "A remote folder to restart from if need be" + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict", + "info": "Additional parameters not related to VASP itself." + }, + { + "name": "verbose", + "required": false, + "valid_types": "Bool", + "info": "\n If True, enable more detailed output during workchain execution.\n " + }, + { + "name": "wavefunctions", + "required": false, + "valid_types": "WavefunData", + "info": "The wave function coefficients. (WAVECAR)" + } + ], + "outputs": [ + { + "name": "custom_outputs", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "misc", + "required": true, + "valid_types": "Dict", + "info": "Per-image misc output." + }, + { + "name": "neb_misc", + "required": true, + "valid_types": "Dict", + "info": "NEB related data combined for each image" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "NEB images" + }, + { + "name": "bands", + "required": false, + "valid_types": "BandsData", + "info": "The output band structure." + }, + { + "name": "born_charges", + "required": false, + "valid_types": "ArrayData", + "info": "The output Born effective charges." + }, + { + "name": "charge_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output charge density." + }, + { + "name": "chgcar", + "required": false, + "valid_types": "ChargedensityData", + "info": "The output charge density." + }, + { + "name": "dielectrics", + "required": false, + "valid_types": "ArrayData", + "info": "The output dielectric functions." + }, + { + "name": "dos", + "required": false, + "valid_types": "ArrayData", + "info": "The output dos." + }, + { + "name": "dynmat", + "required": false, + "valid_types": "ArrayData", + "info": "The output dynamical matrix." + }, + { + "name": "energies", + "required": false, + "valid_types": "ArrayData", + "info": "The output total energies." + }, + { + "name": "forces", + "required": false, + "valid_types": "ArrayData", + "info": "The output forces." + }, + { + "name": "hessian", + "required": false, + "valid_types": "ArrayData", + "info": "The output Hessian matrix." 
+ }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "Kpoints for each image." + }, + { + "name": "magnetization_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output magnetization density." + }, + { + "name": "projectors", + "required": false, + "valid_types": "ArrayData", + "info": "The output projectors of decomposition." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "site_magnetization", + "required": false, + "valid_types": "Dict", + "info": "The output of the site magnetization for each image." + }, + { + "name": "stress", + "required": false, + "valid_types": "ArrayData", + "info": "The output stress." + }, + { + "name": "trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "The output trajectory data." + }, + { + "name": "wavecar", + "required": false, + "valid_types": "WavefunData", + "info": "The output file containing the plane wave coefficients." + } + ], + "exit_codes": [ + { + "status": 0, + "message": "the sun is shining" + }, + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 301, + "message": "The sub process excepted." + }, + { + "status": 302, + "message": "The sub process was killed." + }, + { + "status": 401, + "message": "The maximum number of iterations was exceeded." + }, + { + "status": 402, + "message": "The process failed for an unknown reason, twice in a row." + }, + { + "status": 501, + "message": "Unrecoverable error in launched NEB calculations." + }, + { + "status": 700, + "message": "the user did not supply a potential family name" + }, + { + "status": 701, + "message": "ValueError was returned from get_potcars_from_structure" + }, + { + "status": 702, + "message": "the potential does not exist" + }, + { + "status": 703, + "message": "the exception: {exception} was thrown while massaging the parameters" + } + ] + }, + "class": "aiida_vasp.workchains.neb:VaspNEBWorkChain" + }, + "vasp.relax": { + "description": [ + "Structure relaxation workchain." 
+ ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "" + }, + { + "name": "dynamics", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "kpoints", + "required": true, + "valid_types": "KpointsData", + "info": "" + }, + { + "name": "options", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "potential_family", + "required": true, + "valid_types": "Str", + "info": "" + }, + { + "name": "potential_mapping", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "relax", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData, CifData", + "info": "" + }, + { + "name": "verify", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "chgcar", + "required": false, + "valid_types": "ChargedensityData", + "info": "" + }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "\n If True, clean the work dir upon the completion of a successfull calculation.\n " + }, + { + "name": "handler_overrides", + "required": false, + "valid_types": "Dict", + "info": "Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated." + }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "\n The maximum number of iterations to perform.\n " + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "restart_folder", + "required": false, + "valid_types": "RemoteData", + "info": "\n The restart folder from a previous workchain run that is going to be used.\n " + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "site_magnetization", + "required": false, + "valid_types": "Dict", + "info": "Site magnetization to be used as MAGMOM" + }, + { + "name": "verbose", + "required": false, + "valid_types": "Bool", + "info": "\n If True, enable more detailed output during workchain execution.\n " + }, + { + "name": "wavecar", + "required": false, + "valid_types": "WavefunData", + "info": "" + } + ], + "outputs": [ + { + "name": "custom_outputs", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "misc", + "required": true, + "valid_types": "Dict", + "info": "The output parameters containing smaller quantities that do not depend on system size." + }, + { + "name": "relax", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "bands", + "required": false, + "valid_types": "BandsData", + "info": "The output band structure." 
+ }, + { + "name": "born_charges", + "required": false, + "valid_types": "ArrayData", + "info": "The output Born effective charges." + }, + { + "name": "charge_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output charge density." + }, + { + "name": "chgcar", + "required": false, + "valid_types": "ChargedensityData", + "info": "The output charge density CHGCAR file." + }, + { + "name": "dielectrics", + "required": false, + "valid_types": "ArrayData", + "info": "The output dielectric functions." + }, + { + "name": "dos", + "required": false, + "valid_types": "ArrayData", + "info": "The output dos." + }, + { + "name": "dynmat", + "required": false, + "valid_types": "ArrayData", + "info": "The output dynamical matrix." + }, + { + "name": "energies", + "required": false, + "valid_types": "ArrayData", + "info": "The output total energies." + }, + { + "name": "forces", + "required": false, + "valid_types": "ArrayData", + "info": "The output forces." + }, + { + "name": "hessian", + "required": false, + "valid_types": "ArrayData", + "info": "The output Hessian matrix." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "The output k-points." + }, + { + "name": "magnetization_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output magnetization density." + }, + { + "name": "projectors", + "required": false, + "valid_types": "ArrayData", + "info": "The output projectors of decomposition." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "site_magnetization", + "required": false, + "valid_types": "Dict", + "info": "The output of the site magnetization" + }, + { + "name": "stress", + "required": false, + "valid_types": "ArrayData", + "info": "The output stress." + }, + { + "name": "structure", + "required": false, + "valid_types": "StructureData", + "info": "The output structure." + }, + { + "name": "trajectory", + "required": false, + "valid_types": "TrajectoryData", + "info": "The output trajectory data." + }, + { + "name": "wavecar", + "required": false, + "valid_types": "WavefunData", + "info": "The output plane wave coefficients file." + } + ], + "exit_codes": [ + { + "status": 0, + "message": "the sun is shining" + }, + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." 
+ },
+ {
+ "status": 300,
+ "message": "the called workchain does not contain the necessary relaxed output structure"
+ },
+ {
+ "status": 420,
+ "message": "no called workchain detected"
+ },
+ {
+ "status": 500,
+ "message": "unknown error detected in the relax workchain"
+ },
+ {
+ "status": 502,
+ "message": "there was an error overriding the parameters"
+ }
+ ]
+ },
+ "class": "aiida_vasp.workchains.relax:RelaxWorkChain"
+ },
+ "vasp.vasp": {
+ "description": [
+ "The VASP workchain.",
+ "",
+ " -------------------",
+ " Error handling enriched wrapper around VaspCalculation.",
+ "",
+ " Deliberately conserves most of the interface (required inputs) of the VaspCalculation class, but",
+ " makes it possible for a user to interact with a workchain and not a calculation.",
+ "",
+ " This is intended to be used instead of directly submitting a VaspCalculation,",
+ " so that future features like",
+ " automatic restarting, error checking etc. can be propagated to higher level workchains",
+ " automatically by implementing them here.",
+ "",
+ " Handlers are implemented to try to fix common problems and improve the robustness.",
+ " Individual handlers can be enabled/disabled by setting the ``handler_overrides`` input port.",
+ " Additional settings may be passed under the \"settings\" input, which is also forwarded to the",
+ " calculations. The available options are:",
+ "",
+ " - ``USE_WAVECAR_FOR_RESTART`` whether calculation restarts should use the WAVECAR. The default is ``True``.",
+ "",
+ " Usage::",
+ "",
+ " from aiida.common.extendeddicts import AttributeDict",
+ " from aiida.work import submit",
+ " basevasp = WorkflowFactory('vasp.vasp')",
+ " inputs = basevasp.get_builder()",
+ " inputs = AttributeDict()",
+ " ## ... set inputs",
+ " submit(basevasp, **inputs)",
+ "",
+ " To see a working example, including generation of input nodes from scratch, please",
+ " refer to ``examples/run_vasp_lean.py``."
+ ],
+ "spec": {
+ "inputs": [
+ {
+ "name": "code",
+ "required": true,
+ "valid_types": "Code",
+ "info": ""
+ },
+ {
+ "name": "dynamics",
+ "required": true,
+ "valid_types": "",
+ "info": ""
+ },
+ {
+ "name": "kpoints",
+ "required": true,
+ "valid_types": "KpointsData",
+ "info": ""
+ },
+ {
+ "name": "options",
+ "required": true,
+ "valid_types": "Dict",
+ "info": ""
+ },
+ {
+ "name": "parameters",
+ "required": true,
+ "valid_types": "Dict",
+ "info": ""
+ },
+ {
+ "name": "potential_family",
+ "required": true,
+ "valid_types": "Str",
+ "info": ""
+ },
+ {
+ "name": "potential_mapping",
+ "required": true,
+ "valid_types": "Dict",
+ "info": ""
+ },
+ {
+ "name": "structure",
+ "required": true,
+ "valid_types": "StructureData, CifData",
+ "info": ""
+ },
+ {
+ "name": "chgcar",
+ "required": false,
+ "valid_types": "ChargedensityData",
+ "info": ""
+ },
+ {
+ "name": "clean_workdir",
+ "required": false,
+ "valid_types": "Bool",
+ "info": "\n If True, clean the work dir upon the completion of a successful calculation.\n "
+ },
+ {
+ "name": "handler_overrides",
+ "required": false,
+ "valid_types": "Dict",
+ "info": "Mapping where keys are process handler names and the values are a boolean, where `True` will enable the corresponding handler and `False` will disable it. This overrides the default value set by the `enabled` keyword of the `process_handler` decorator with which the method is decorated."
+ }, + { + "name": "max_iterations", + "required": false, + "valid_types": "Int", + "info": "\n The maximum number of iterations to perform.\n " + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "restart_folder", + "required": false, + "valid_types": "RemoteData", + "info": "\n The restart folder from a previous workchain run that is going to be used.\n " + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict", + "info": "" + }, + { + "name": "site_magnetization", + "required": false, + "valid_types": "Dict", + "info": "Site magnetization to be used as MAGMOM" + }, + { + "name": "verbose", + "required": false, + "valid_types": "Bool", + "info": "\n If True, enable more detailed output during workchain execution.\n " + }, + { + "name": "wavecar", + "required": false, + "valid_types": "WavefunData", + "info": "" + } + ], + "outputs": [ + { + "name": "custom_outputs", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "misc", + "required": true, + "valid_types": "Dict", + "info": "The output parameters containing smaller quantities that do not depend on system size." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "bands", + "required": false, + "valid_types": "BandsData", + "info": "The output band structure." + }, + { + "name": "born_charges", + "required": false, + "valid_types": "ArrayData", + "info": "The output Born effective charges." + }, + { + "name": "charge_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output charge density." + }, + { + "name": "chgcar", + "required": false, + "valid_types": "ChargedensityData", + "info": "The output charge density CHGCAR file." + }, + { + "name": "dielectrics", + "required": false, + "valid_types": "ArrayData", + "info": "The output dielectric functions." + }, + { + "name": "dos", + "required": false, + "valid_types": "ArrayData", + "info": "The output dos." + }, + { + "name": "dynmat", + "required": false, + "valid_types": "ArrayData", + "info": "The output dynamical matrix." + }, + { + "name": "energies", + "required": false, + "valid_types": "ArrayData", + "info": "The output total energies." + }, + { + "name": "forces", + "required": false, + "valid_types": "ArrayData", + "info": "The output forces." + }, + { + "name": "hessian", + "required": false, + "valid_types": "ArrayData", + "info": "The output Hessian matrix." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData", + "info": "The output k-points." + }, + { + "name": "magnetization_density", + "required": false, + "valid_types": "ArrayData", + "info": "The output magnetization density." + }, + { + "name": "projectors", + "required": false, + "valid_types": "ArrayData", + "info": "The output projectors of decomposition." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." 
+ },
+ {
+ "name": "site_magnetization",
+ "required": false,
+ "valid_types": "Dict",
+ "info": "The output of the site magnetization"
+ },
+ {
+ "name": "stress",
+ "required": false,
+ "valid_types": "ArrayData",
+ "info": "The output stress."
+ },
+ {
+ "name": "structure",
+ "required": false,
+ "valid_types": "StructureData",
+ "info": "The output structure."
+ },
+ {
+ "name": "trajectory",
+ "required": false,
+ "valid_types": "TrajectoryData",
+ "info": "The output trajectory data."
+ },
+ {
+ "name": "wavecar",
+ "required": false,
+ "valid_types": "WavefunData",
+ "info": "The output plane wave coefficients file."
+ }
+ ],
+ "exit_codes": [
+ {
+ "status": 0,
+ "message": "the sun is shining"
+ },
+ {
+ "status": 1,
+ "message": "The process has failed with an unspecified error."
+ },
+ {
+ "status": 2,
+ "message": "The process failed with legacy failure mode."
+ },
+ {
+ "status": 10,
+ "message": "The process returned an invalid output."
+ },
+ {
+ "status": 11,
+ "message": "The process did not register a required output."
+ },
+ {
+ "status": 300,
+ "message": "the calculation is missing at least one required output in the restart workchain"
+ },
+ {
+ "status": 301,
+ "message": "The sub process excepted."
+ },
+ {
+ "status": 302,
+ "message": "The sub process was killed."
+ },
+ {
+ "status": 400,
+ "message": "the run_calculation step did not successfully add a calculation node to the context"
+ },
+ {
+ "status": 401,
+ "message": "the maximum number of iterations was exceeded"
+ },
+ {
+ "status": 402,
+ "message": "the calculation finished with an unexpected calculation state"
+ },
+ {
+ "status": 403,
+ "message": "the calculation experienced an unexpected failure"
+ },
+ {
+ "status": 404,
+ "message": "the calculation failed to submit, twice in a row"
+ },
+ {
+ "status": 405,
+ "message": "the calculation failed for an unknown reason, twice in a row"
+ },
+ {
+ "status": 500,
+ "message": "Missing critical output for inspecting the status of the calculation."
+ },
+ {
+ "status": 501,
+ "message": "Cannot handle the error - the inputs likely need to be revised manually. Message: {message}"
+ },
+ {
+ "status": 502,
+ "message": "Cannot handle the error - the last calculation did not reach the end of execution."
+ },
+ {
+ "status": 503,
+ "message": "Cannot handle the error - the last calculation did not reach electronic convergence."
+ },
+ {
+ "status": 504,
+ "message": "The ionic relaxation is not converged."
+ },
+ {
+ "status": 505,
+ "message": "At least one of the ionic steps during the relaxation did not have a converged electronic structure."
+ }, + { + "status": 700, + "message": "the user did not supply a potential family name" + }, + { + "status": 701, + "message": "ValueError was returned from get_potcars_from_structure" + }, + { + "status": 702, + "message": "the potential does not exist" + }, + { + "status": 703, + "message": "the exception: {exception} was thrown while massaging the parameters" + } + ] + }, + "class": "aiida_vasp.workchains.vasp:VaspWorkChain" + } + }, + "console_scripts": { + "mock-vasp": "aiida_vasp.commands.mock_vasp:mock_vasp", + "mock-vasp-strict": "aiida_vasp.commands.mock_vasp:mock_vasp_strict" + } + }, + "commits_count": 110, + "development_status": "stable", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 4 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 3 + }, + { + "colorclass": "red", + "text": "Data", + "count": 5 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 7 + }, + { + "colorclass": "purple", + "text": "Console scripts", + "count": 2 + }, + { + "colorclass": "orange", + "text": "Other (Data commands, Groups)", + "count": 2 + } + ], + "pip_install_cmd": "pip install aiida-vasp", + "is_installable": "True" + }, + "aiida-wannier90": { + "code_home": "https://github.com/aiidateam/aiida-wannier90", + "documentation_url": "https://aiida-wannier90.readthedocs.io/", + "entry_point_prefix": "wannier90", + "pip_url": "aiida-wannier90", + "plugin_info": "https://raw.github.com/aiidateam/aiida-wannier90/master/setup.json", + "name": "aiida-wannier90", + "package_name": "aiida_wannier90", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA Plugin for the Wannier90 code", + "author": "Junfeng Qiao, Dominik Gresch, Antimo Marrazzo, Daniel Marchand, Giovanni Pizzi, Norma Rivano, The AiiDA team", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Environment :: Plugins", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering :: Physics" + ], + "version": "2.1.0" + }, + "aiida_version": ">=2.0,<3", + "entry_points": { + "aiida.calculations": { + "wannier90.postw90": { + "description": [ + "Plugin for Wannier90.", + "", + " Wannier90 is a code for computing maximally-localized Wannier functions.", + " See http://www.wannier.org/ for more details." + ], + "spec": { + "inputs": [ + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "Input parameters for the Wannier90 code" + }, + { + "name": "parent_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Get input files (``.amn``, ``.mmn``, ...) from a class ``RemoteData`` possibly stored in a remote computer." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "input crystal structure" + }, + { + "name": "bands_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "A list of k-points along a path to be used for bands interpolation; it should contain `labels`. Specify either this or `kpoint_path`." 
+ }, + { + "name": "clean_workdir", + "required": false, + "valid_types": "Bool", + "info": "If `True`, work directories of all called calculation jobs will be cleaned at the end of execution." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "kpoint_path", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Description of the k-points path to be used for bands interpolation; it should contain two properties: a list ``path`` of length-2 tuples with the labels of the endpoints of the path; and a dictionary ``point_coords`` giving the scaled coordinates for each high-symmetry endpoint." + }, + { + "name": "kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "k-point mesh used in the NSCF calculation." + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "projections", + "required": false, + "valid_types": "OrbitalData, Dict, List, NoneType", + "info": "Starting projections for the Wannierisation procedure." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Additional settings to manage the Wannier90 calculation." + } + ], + "outputs": [ + { + "name": "boltzwann", + "required": true, + "valid_types": "", + "info": "" + }, + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "The ``output_parameters`` output node of the successful calculation." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "interpolated_bands", + "required": false, + "valid_types": "BandsData", + "info": "The interpolated band structure by Wannier90 (if any)." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." 
+ }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 200, + "message": "The retrieved folder data node could not be accessed." + }, + { + "status": 210, + "message": "The retrieved folder did not contain the required stdout output file." + }, + { + "status": 300, + "message": "A Wannier90 error file (.werr) has been found." + }, + { + "status": 400, + "message": "The string \"Exiting...\" has been found in the Wannier90 output (some partial output might have been parsed)." + }, + { + "status": 401, + "message": "An error related to bvectors has been found in the Wannier90 output." + }, + { + "status": 402, + "message": "Energy window contains fewer states than number of target WFs." + }, + { + "status": 403, + "message": "Error plotting Wanier functions in cube format." + }, + { + "status": 404, + "message": "The stdout output file was incomplete probably because the calculation got interrupted." + }, + { + "status": 405, + "message": "Some output files were missing probably because the calculation got interrupted." + }, + { + "status": 406, + "message": "The retrieved temporary folder could not be accessed." + } + ] + }, + "class": "aiida_wannier90.calculations:Postw90Calculation" + }, + "wannier90.wannier90": { + "description": [ + "Plugin for Wannier90.", + "", + " Wannier90 is a code for computing maximally-localized Wannier functions.", + " See http://www.wannier.org/ for more details." + ], + "spec": { + "inputs": [ + { + "name": "kpoints", + "required": true, + "valid_types": "KpointsData", + "info": "k-point mesh used in the NSCF calculation." + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "Input parameters for the Wannier90 code" + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "input crystal structure" + }, + { + "name": "bands_kpoints", + "required": false, + "valid_types": "KpointsData, NoneType", + "info": "A list of k-points along a path to be used for bands interpolation; it should contain `labels`. Specify either this or `kpoint_path`." + }, + { + "name": "code", + "required": false, + "valid_types": "AbstractCode, NoneType", + "info": "The `Code` to use for this job. This input is required, unless the `remote_folder` input is specified, which means an existing job is being imported and no code will actually be run." + }, + { + "name": "kpoint_path", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Description of the k-points path to be used for bands interpolation; it should contain two properties: a list ``path`` of length-2 tuples with the labels of the endpoints of the path; and a dictionary ``point_coords`` giving the scaled coordinates for each high-symmetry endpoint." + }, + { + "name": "local_input_folder", + "required": false, + "valid_types": "FolderData, NoneType", + "info": "Get input files (``.amn``, ``.mmn``, ...) from a class ``FolderData`` stored in the AiiDA repository." 
+ }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "monitors", + "required": false, + "valid_types": "Dict", + "info": "Add monitoring functions that can inspect output files while the job is running and decide to prematurely terminate the job." + }, + { + "name": "projections", + "required": false, + "valid_types": "OrbitalData, Dict, List, NoneType", + "info": "Starting projections for the Wannierisation procedure." + }, + { + "name": "remote_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Remote directory containing the results of an already completed calculation job without AiiDA. The inputs should be passed to the `CalcJob` as normal but instead of launching the actual job, the engine will recreate the input files and then proceed straight to the retrieve step where the files of this `RemoteData` will be retrieved as if it had been actually launched through AiiDA. If a parser is defined in the inputs, the results are parsed and attached as output nodes as usual." + }, + { + "name": "remote_input_folder", + "required": false, + "valid_types": "RemoteData, NoneType", + "info": "Get input files (``.amn``, ``.mmn``, ...) from a class ``RemoteData`` possibly stored in a remote computer." + }, + { + "name": "settings", + "required": false, + "valid_types": "Dict, NoneType", + "info": "Additional settings to manage the Wannier90 calculation." + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "The ``output_parameters`` output node of the successful calculation." + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "interpolated_bands", + "required": false, + "valid_types": "BandsData", + "info": "The interpolated band structure by Wannier90 (if any)." + }, + { + "name": "nnkp_file", + "required": false, + "valid_types": "SinglefileData", + "info": "The ``.nnkp`` file, produced only in -pp (postproc) mode." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 131, + "message": "The specified account is invalid." + }, + { + "status": 140, + "message": "The node running the job failed." + }, + { + "status": 150, + "message": "{message}" + }, + { + "status": 200, + "message": "The retrieved folder data node could not be accessed." + }, + { + "status": 210, + "message": "The retrieved folder did not contain the required stdout output file." 
+ }, + { + "status": 300, + "message": "A Wannier90 error file (.werr) has been found." + }, + { + "status": 400, + "message": "The string \"Exiting...\" has been found in the Wannier90 output (some partial output might have been parsed)." + }, + { + "status": 401, + "message": "An error related to bvectors has been found in the Wannier90 output." + }, + { + "status": 402, + "message": "Energy window contains fewer states than number of target WFs." + }, + { + "status": 403, + "message": "Error plotting Wanier functions in cube format." + }, + { + "status": 404, + "message": "The stdout output file was incomplete probably because the calculation got interrupted." + } + ] + }, + "class": "aiida_wannier90.calculations:Wannier90Calculation" + } + }, + "aiida.parsers": { + "wannier90.postw90": "aiida_wannier90.parsers:Postw90Parser", + "wannier90.wannier90": "aiida_wannier90.parsers:Wannier90Parser" + }, + "aiida.workflows": { + "wannier90.minimal": { + "description": [ + "Workchain to run a full stack of Quantum ESPRESSO + Wannier90 for GaAs.", + "", + " Note that this is mostly to be used as an example, as there is no", + " error checking and runs directly Quantum ESPRESSO calculations rather", + " than the base workflows." + ], + "spec": { + "inputs": [ + { + "name": "kpoint_path", + "required": true, + "valid_types": "Dict", + "info": "The kpoints path for the NSCF run and Wannierisation." + }, + { + "name": "kpoints_nscf", + "required": true, + "valid_types": "KpointsData", + "info": "The kpoints for the NSCF run and Wannierisation." + }, + { + "name": "kpoints_scf", + "required": true, + "valid_types": "KpointsData", + "info": "The kpoints for the SCF run." + }, + { + "name": "projections", + "required": true, + "valid_types": "OrbitalData", + "info": "The projections for the Wannierisation." + }, + { + "name": "pseudo_family", + "required": true, + "valid_types": "Str", + "info": "The name of a pseudopotential family to use." + }, + { + "name": "pw2wannier90_code", + "required": true, + "valid_types": "Code", + "info": "The `pw2wannier90.x` code to use for the `Pw2Wannier90Calculation`s." + }, + { + "name": "pw_code", + "required": true, + "valid_types": "Code", + "info": "The `pw.x` code to use for the `PwCalculation`s." + }, + { + "name": "structure", + "required": true, + "valid_types": "StructureData", + "info": "The input structure." + }, + { + "name": "wannier_code", + "required": true, + "valid_types": "Code", + "info": "The `wannier90.x` code to use for the `Wannier90Calculation`s." 
+ }, + { + "name": "max_wallclock_seconds", + "required": false, + "valid_types": "Int, NoneType", + "info": "Maximum wallclock time in seconds" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "num_machines", + "required": false, + "valid_types": "Int, NoneType", + "info": "The number of machines (nodes) to use" + } + ], + "outputs": [ + { + "name": "matrices_folder", + "required": true, + "valid_types": "FolderData", + "info": "" + }, + { + "name": "nnkp_file", + "required": true, + "valid_types": "SinglefileData", + "info": "" + }, + { + "name": "nscf_output", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "p2wannier_output", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "pw2wan_remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "" + }, + { + "name": "scf_output", + "required": true, + "valid_types": "Dict", + "info": "" + }, + { + "name": "wannier_bands", + "required": true, + "valid_types": "BandsData", + "info": "" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + } + ] + }, + "class": "aiida_wannier90.workflows.minimal:MinimalW90WorkChain" + } + } + }, + "commits_count": 20, + "development_status": "stable", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 2 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 2 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-wannier90", + "is_installable": "True" + }, + "aiida-wannier90-workflows": { + "code_home": "https://github.com/aiidateam/aiida-wannier90-workflows", + "development_status": "stable", + "entry_point_prefix": "wannier90_workflows", + "pip_url": "aiida-wannier90-workflows", + "plugin_info": "https://raw.github.com/aiidateam/aiida-wannier90-workflows/master/setup.json", + "name": "aiida-wannier90-workflows", + "package_name": "aiida_wannier90_workflows", + "hosted_on": "github.com", + "metadata": { + "description": "Advanced AiiDA workflows for Wannier90", + "author": "Junfeng Qiao, Antimo Marrazzo, Giovanni Pizzi", + "classifiers": [ + "Development Status :: 5 - Production/Stable", + "Environment :: Plugins", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering :: Physics" + ], + "version": "2.1.0" + }, + "aiida_version": ">=2.0,<3", + "entry_points": { + "aiida.calculations": { + "wannier90_workflows.split": "aiida_wannier90_workflows.calculations.split:Wannier90SplitCalculation" + }, + "aiida.parsers": { + "wannier90_workflows.split": "aiida_wannier90_workflows.parsers.split:Wannier90SplitParser" + }, + "aiida.workflows": { + "wannier90_workflows.bands": "aiida_wannier90_workflows.workflows.bands:Wannier90BandsWorkChain", + 
"wannier90_workflows.base.open_grid": "aiida_wannier90_workflows.workflows.base.open_grid:OpenGridBaseWorkChain", + "wannier90_workflows.base.projwfc": "aiida_wannier90_workflows.workflows.base.projwfc:ProjwfcBaseWorkChain", + "wannier90_workflows.base.pw2wannier90": "aiida_wannier90_workflows.workflows.base.pw2wannier90:Pw2wannier90BaseWorkChain", + "wannier90_workflows.base.wannier90": "aiida_wannier90_workflows.workflows.base.wannier90:Wannier90BaseWorkChain", + "wannier90_workflows.open_grid": "aiida_wannier90_workflows.workflows.open_grid:Wannier90OpenGridWorkChain", + "wannier90_workflows.optimize": "aiida_wannier90_workflows.workflows.optimize:Wannier90OptimizeWorkChain", + "wannier90_workflows.projwfcbands": "aiida_wannier90_workflows.workflows.projwfcbands:ProjwfcBandsWorkChain", + "wannier90_workflows.split": "aiida_wannier90_workflows.workflows.split:Wannier90SplitWorkChain", + "wannier90_workflows.wannier90": "aiida_wannier90_workflows.workflows.wannier90:Wannier90WorkChain" + }, + "console_scripts": { + "aiida-wannier90-workflows": "aiida_wannier90_workflows.cli:cmd_root" + } + }, + "commits_count": 42, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 10 + }, + { + "colorclass": "purple", + "text": "Console scripts", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-wannier90-workflows", + "is_installable": "True" + }, + "aiida-wien2k": { + "code_home": "https://github.com/rubel75/aiida-wien2k", + "entry_point_prefix": "wien2k", + "name": "aiida-wien2k", + "package_name": "aiida_wien2k", + "hosted_on": "github.com", + "metadata": {}, + "aiida_version": null, + "entry_points": {}, + "commits_count": 1, + "development_status": "planning", + "summaryinfo": [], + "pip_install_cmd": "See source code repository." 
+ }, + "aiida-yambo": { + "code_home": "https://github.com/yambo-code/yambo-aiida/", + "development_status": "stable", + "entry_point_prefix": "yambo", + "pip_url": "aiida-yambo", + "plugin_info": "https://raw.github.com/yambo-code/yambo-aiida/master/setup.json", + "name": "aiida-yambo", + "package_name": "aiida_yambo", + "hosted_on": "github.com", + "metadata": { + "description": "YAMBO plugin and workflows for AiiDA", + "author": "Miki Bonacci, Michael Atambo, Antimo Marrazzo, Prandini Gianluca", + "author_email": "miki.bonacci@unimore.it", + "license": "MIT", + "home_page": "https://github.com/yambo-code/yambo-aiida", + "classifiers": [ + "Environment :: Plugins", + "Framework :: AiiDA", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python", + "Topic :: Scientific/Engineering :: Physics" + ], + "version": "1.3.0" + }, + "aiida_version": ">=1.0.0a2", + "entry_points": { + "aiida.calculations": { + "yambo.yambo": { + "description": [ + "AiiDA plugin for the Yambo code.", + " For more information, refer to http://www.yambo-code.org/", + " https://github.com/yambo-code/yambo-aiida and http://aiida-yambo.readthedocs.io/en/latest/" + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "Use a main code for yambo calculation" + }, + { + "name": "parameters", + "required": true, + "valid_types": "Dict", + "info": "Use a node that specifies the input parameters" + }, + { + "name": "parent_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Use a remote folder as parent folder (for \"restarts and similar\"" + }, + { + "name": "settings", + "required": true, + "valid_types": "Dict", + "info": "Use an additional node for special settings" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + }, + { + "name": "precode_parameters", + "required": false, + "valid_types": "Dict", + "info": "Use a node that specifies the input parameters for the yambo precode" + }, + { + "name": "preprocessing_code", + "required": false, + "valid_types": "Code", + "info": "Use a preprocessing code for starting yambo" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "returns the output parameters" + }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." 
+ }, + { + "name": "array_alpha", + "required": false, + "valid_types": "ArrayData", + "info": "returns the alpha array" + }, + { + "name": "array_alpha_array", + "required": false, + "valid_types": "ArrayData", + "info": "returns the alpha array" + }, + { + "name": "array_alpha_bands", + "required": false, + "valid_types": "ArrayData", + "info": "returns the alpha array bands" + }, + { + "name": "array_eels", + "required": false, + "valid_types": "ArrayData", + "info": "returns the eels array" + }, + { + "name": "array_eps", + "required": false, + "valid_types": "ArrayData", + "info": "returns the eps array" + }, + { + "name": "array_ndb", + "required": false, + "valid_types": "ArrayData", + "info": "returns the array for ndb" + }, + { + "name": "array_ndb_HFlocXC", + "required": false, + "valid_types": "ArrayData", + "info": "returns the array ndb for HFlocXC" + }, + { + "name": "array_ndb_QP", + "required": false, + "valid_types": "ArrayData", + "info": "returns the array for ndbQP" + }, + { + "name": "array_qp", + "required": false, + "valid_types": "ArrayData", + "info": "returns the quasiparticle array band structure" + }, + { + "name": "bands_quasiparticle", + "required": false, + "valid_types": "BandsData", + "info": "returns the quasiparticle band structure" + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + }, + { + "name": "system_info", + "required": false, + "valid_types": "Dict", + "info": "returns some system information after a p2y" + } + ], + "exit_codes": [ + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + }, + { + "status": 500, + "message": "The retrieved folder data node could not be accessed." 
+ }, + { + "status": 501, + "message": "time exceeded the max walltime" + }, + { + "status": 502, + "message": "failed calculation for some reason: could be a low number of conduction bands" + }, + { + "status": 503, + "message": "Unexpected behavior of YamboFolder" + }, + { + "status": 504, + "message": "parallelization error" + }, + { + "status": 505, + "message": "general memory error" + }, + { + "status": 506, + "message": "x_par allocation memory error" + } + ] + }, + "class": "aiida_yambo.calculations.yambo:YamboCalculation" + } + }, + "aiida.data": {}, + "aiida.parsers": { + "yambo.yambo": "aiida_yambo.parsers.parsers:YamboParser" + } + }, + "commits_count": 59, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-yambo", + "is_installable": "True" + }, + "aiida-yambo-wannier90": { + "code_home": "https://github.com/aiidaplugins/aiida-yambo-wannier90", + "entry_point_prefix": "yambo_wannier90", + "pip_url": "aiida-yambo-wannier90", + "plugin_info": "https://raw.githubusercontent.com/aiidaplugins/aiida-yambo-wannier90/main/pyproject.toml", + "documentation_url": "https://aiida-yambo-wannier90.readthedocs.io/en/latest/", + "version_file": "https://raw.githubusercontent.com/aiidaplugins/aiida-yambo-wannier90/main/aiida_yambo_wannier90/__init__.py", + "name": "aiida-yambo-wannier90", + "package_name": "aiida_yambo_wannier90", + "hosted_on": "github.com", + "metadata": { + "description": "Plugin to combine Wannier90 interpolations with GW corrections computed by Yambo", + "author": "The AiiDA Team", + "classifiers": [ + "Development Status :: 4 - Beta", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python" + ], + "version": "0.1.0b0" + }, + "aiida_version": ">=1.6.4,<3", + "entry_points": { + "aiida.calculations": { + "yambo_wannier90.gw2wannier90": "aiida_yambo_wannier90.calculations.gw2wannier90:Gw2wannier90Calculation" + }, + "aiida.parsers": { + "yambo_wannier90.gw2wannier90": "aiida_yambo_wannier90.parsers.gw2wannier90:Gw2wannier90Parser" + }, + "aiida.workflows": { + "yambo_wannier90": "aiida_yambo_wannier90.workflows:YamboWannier90WorkChain" + }, + "console_scripts": { + "aiida-yambo-wannier90": "aiida_yambo_wannier90.cli:cmd_root" + } + }, + "commits_count": 0, + "development_status": "beta", + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "green", + "text": "Workflows", + "count": 1 + }, + { + "colorclass": "purple", + "text": "Console scripts", + "count": 1 + } + ], + "pip_install_cmd": "pip install --pre aiida-yambo-wannier90", + "is_installable": "True" + }, + "aiida-yascheduler": { + "code_home": "https://github.com/tilde-lab/yascheduler", + "documentation_url": "https://github.com/tilde-lab/yascheduler", + "entry_point_prefix": "yascheduler", + "pip_url": "yascheduler", + "plugin_info": "https://raw.githubusercontent.com/tilde-lab/yascheduler/master/setup.json", + "name": "aiida-yascheduler", + "package_name": "aiida_yascheduler", + "hosted_on": "github.com", + "metadata": { + "description": "Yet another computing scheduler and cloud orchestration engine", + "author": "Andrey Sobolev", + "author_email": "Evgeny Blokhin , Sergei Korolev ", + "classifiers": [ + "Development 
Status :: 4 - Beta", + "Framework :: AiiDA", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering :: Chemistry", + "Topic :: Scientific/Engineering :: Information Analysis", + "Topic :: Scientific/Engineering :: Physics", + "Topic :: Software Development :: Libraries :: Python Modules" + ], + "version": "1.2.0" + }, + "aiida_version": null, + "entry_points": { + "aiida.schedulers": { + "yascheduler": "yascheduler.aiida_plugin:YaScheduler" + }, + "console_scripts": { + "yainit": "yascheduler.utils:init", + "yanodes": "yascheduler.utils:show_nodes", + "yascheduler": "yascheduler.utils:daemonize", + "yasetnode": "yascheduler.utils:manage_node", + "yastatus": "yascheduler.utils:check_status", + "yasubmit": "yascheduler.utils:submit" + } + }, + "commits_count": 76, + "development_status": "beta", + "summaryinfo": [ + { + "colorclass": "purple", + "text": "Console scripts", + "count": 6 + }, + { + "colorclass": "orange", + "text": "Other (Schedulers)", + "count": 1 + } + ], + "pip_install_cmd": "pip install yascheduler", + "is_installable": "True" + }, + "aiida-z2pack": { + "code_home": "https://github.com/AntimoMarrazzo/aiida-z2pack", + "entry_point_prefix": "z2pack", + "pip_url": "git+https://github.com/AntimoMarrazzo/aiida-z2pack", + "name": "aiida-z2pack", + "package_name": "aiida_z2pack", + "hosted_on": "github.com", + "metadata": {}, + "aiida_version": null, + "entry_points": {}, + "commits_count": 18, + "development_status": "planning", + "summaryinfo": [], + "pip_install_cmd": "pip install git+https://github.com/AntimoMarrazzo/aiida-z2pack" + }, + "aiida-zeopp": { + "code_home": "https://github.com/ltalirz/aiida-zeopp", + "development_status": "stable", + "entry_point_prefix": "zeopp", + "pip_url": "aiida-zeopp", + "name": "aiida-zeopp", + "package_name": "aiida_zeopp", + "hosted_on": "github.com", + "metadata": { + "description": "AiiDA plugin for zeo++", + "author": "Leopold Talirz", + "author_email": "leopold.talirz@epfl.ch", + "license": "Creative Commons", + "home_page": "https://github.com/ltalirz/aiida-zeopp", + "classifiers": [ + "Framework :: AiiDA", + "Programming Language :: Python" + ], + "version": "1.1.2" + }, + "aiida_version": null, + "entry_points": { + "aiida.calculations": { + "zeopp.network": { + "description": [ + "AiiDA calculation plugin for the zeo++ network binary" + ], + "spec": { + "inputs": [ + { + "name": "code", + "required": true, + "valid_types": "Code", + "info": "The `Code` to use for this job." + }, + { + "name": "parameters", + "required": true, + "valid_types": "NetworkParameters", + "info": "command line parameters for zeo++" + }, + { + "name": "structure", + "required": true, + "valid_types": "CifData", + "info": "input structure to be analyzed" + }, + { + "name": "atomic_radii", + "required": false, + "valid_types": "SinglefileData", + "info": "atomic radii file" + }, + { + "name": "metadata", + "required": false, + "valid_types": "", + "info": "" + } + ], + "outputs": [ + { + "name": "output_parameters", + "required": true, + "valid_types": "Dict", + "info": "key-value pairs parsed from zeo++ output file(s)." 
+ }, + { + "name": "remote_folder", + "required": true, + "valid_types": "RemoteData", + "info": "Input files necessary to run the process will be stored in this folder node." + }, + { + "name": "retrieved", + "required": true, + "valid_types": "FolderData", + "info": "Files that are retrieved by the daemon will be stored in this node. By default the stdout and stderr of the scheduler will be added, but one can add more by specifying them in `CalcInfo.retrieve_list`." + }, + { + "name": "block", + "required": false, + "valid_types": "SinglefileData", + "info": "Blocked pockets fileoutput file." + }, + { + "name": "remote_stash", + "required": false, + "valid_types": "RemoteStashData", + "info": "Contents of the `stash.source_list` option are stored in this remote folder after job completion." + } + ], + "exit_codes": [ + { + "status": 0, + "message": "Calculation completed successfully." + }, + { + "status": 1, + "message": "The process has failed with an unspecified error." + }, + { + "status": 2, + "message": "The process failed with legacy failure mode." + }, + { + "status": 10, + "message": "The process returned an invalid output." + }, + { + "status": 11, + "message": "The process did not register a required output." + }, + { + "status": 100, + "message": "The process did not have the required `retrieved` output." + }, + { + "status": 101, + "message": "Not all expected output files were found." + }, + { + "status": 102, + "message": "Empty block file. This indicates the calculation of blocked pockets did not finish." + }, + { + "status": 110, + "message": "The job ran out of memory." + }, + { + "status": 120, + "message": "The job ran out of walltime." + } + ] + }, + "class": "aiida_zeopp.calculations.network:NetworkCalculation" + } + }, + "aiida.parsers": { + "zeopp.network": "aiida_zeopp.parsers.network:NetworkParser" + }, + "aiida.data": { + "zeopp.parameters": "aiida_zeopp.data.parameters:NetworkParameters" + }, + "console_scripts": { + "zeopp-submit": "aiida_zeopp.console_scripts.data_cli:cli" + }, + "aiida.workflows": {} + }, + "commits_count": 0, + "summaryinfo": [ + { + "colorclass": "blue", + "text": "Calculations", + "count": 1 + }, + { + "colorclass": "brown", + "text": "Parsers", + "count": 1 + }, + { + "colorclass": "red", + "text": "Data", + "count": 1 + }, + { + "colorclass": "purple", + "text": "Console scripts", + "count": 1 + } + ], + "pip_install_cmd": "pip install aiida-zeopp", + "is_installable": "True" + } + }, + "globalsummary": [ + { + "name": "Calculations", + "colorclass": "blue", + "num_entries": 54, + "total_num": 132 + }, + { + "name": "Parsers", + "colorclass": "brown", + "num_entries": 55, + "total_num": 110 + }, + { + "name": "Data", + "colorclass": "red", + "num_entries": 29, + "total_num": 101 + }, + { + "name": "Workflows", + "colorclass": "green", + "num_entries": 39, + "total_num": 130 + }, + { + "name": "Console scripts", + "colorclass": "purple", + "num_entries": 16, + "total_num": 27 + }, + { + "name": "Other", + "tooltip": "Aenet potentials, Calculations importers, Calculations monitors, ...", + "colorclass": "orange", + "num_entries": 26, + "total_num": 99 + } + ], + "status_dict": { + "planning": [ + "Not yet ready to use. Developers welcome!", + "status-planning-d9644d.svg" + ], + "pre-alpha": [ + "Not yet ready to use. Developers welcome!", + "status-planning-d9644d.svg" + ], + "alpha": [ + "Adds new functionality, not yet ready for production. 
Testing welcome!", + "status-alpha-d6af23.svg" + ], + "beta": [ + "Adds new functionality, not yet ready for production. Testing welcome!", + "status-beta-d6af23.svg" + ], + "stable": [ + "Ready for production calculations. Bug reports welcome!", + "status-stable-4cc61e.svg" + ], + "mature": [ + "Ready for production calculations. Bug reports welcome!", + "status-stable-4cc61e.svg" + ], + "inactive": [ + "No longer maintained.", + "status-inactive-bbbbbb.svg" + ] + }, + "entrypointtypes": { + "aiida.calculations": "CalcJobs and calculation functions", + "aiida.parsers": "CalcJob parsers", + "aiida.data": "Data node types", + "aiida.cmdline.data": "verdi data commands", + "aiida.groups": "Group types", + "aiida.workflows": "WorkChains and work functions", + "aiida.schedulers": "Job scheduler support", + "aiida.transports": "Data transport protocols", + "aiida.tests": "Development test modules", + "aiida.tools.dbexporters": "Support for exporting to external databases", + "aiida.tools.dbimporters": "Support for importing from external databases", + "console_scripts": "Console scripts" + } +} \ No newline at end of file diff --git a/pr-preview/pr-254/status-alpha-d6af23.svg b/pr-preview/pr-254/status-alpha-d6af23.svg new file mode 100644 index 00000000..8d8df548 --- /dev/null +++ b/pr-preview/pr-254/status-alpha-d6af23.svg @@ -0,0 +1 @@ + statusstatusalphaalpha diff --git a/pr-preview/pr-254/status-beta-d6af23.svg b/pr-preview/pr-254/status-beta-d6af23.svg new file mode 100644 index 00000000..e7c31fe5 --- /dev/null +++ b/pr-preview/pr-254/status-beta-d6af23.svg @@ -0,0 +1 @@ + statusstatusbetabeta diff --git a/pr-preview/pr-254/status-inactive-bbbbbb.svg b/pr-preview/pr-254/status-inactive-bbbbbb.svg new file mode 100644 index 00000000..f893172e --- /dev/null +++ b/pr-preview/pr-254/status-inactive-bbbbbb.svg @@ -0,0 +1 @@ +status: inactivestatusinactive diff --git a/pr-preview/pr-254/status-planning-d9644d.svg b/pr-preview/pr-254/status-planning-d9644d.svg new file mode 100644 index 00000000..9c282792 --- /dev/null +++ b/pr-preview/pr-254/status-planning-d9644d.svg @@ -0,0 +1 @@ + statusstatusplanningplanning diff --git a/pr-preview/pr-254/status-stable-4cc61e.svg b/pr-preview/pr-254/status-stable-4cc61e.svg new file mode 100644 index 00000000..980341bc --- /dev/null +++ b/pr-preview/pr-254/status-stable-4cc61e.svg @@ -0,0 +1 @@ + statusstatusstablestable