From cf7f4f085267c2940fc137b194c82aa6b833697c Mon Sep 17 00:00:00 2001
From: Amith225
Date: Wed, 23 Dec 2020 15:51:19 +0530
Subject: [PATCH] Clean up ignored files

---
 .gitignore                              |   3 +
 LICENSE                                 |  21 ++
 README.md                               |  11 +
 __data__/Examples/dumy                  |   0
 __data__/Saves/decagon                  | Bin 0 -> 22 bytes
 __data__/Saves/dumyl                    |   0
 requirements.txt                        |   1 +
 scr/__pycache__/_data_.cpython-37.pyc   | Bin 322 -> 0 bytes
 scr/__pycache__/gui.cpython-37.pyc      | Bin 2857 -> 0 bytes
 scr/__pycache__/renderer.cpython-37.pyc | Bin 9544 -> 0 bytes
 scr/demo.py                             | 105 ++++++++++
 scr/generator.py                        |  57 +++++
 scr/gui.py                              | 121 +++++++++++
 scr/renderer.py                         | 267 ++++++++++++++++++++++++
 14 files changed, 586 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 LICENSE
 create mode 100644 README.md
 create mode 100644 __data__/Examples/dumy
 create mode 100644 __data__/Saves/decagon
 create mode 100644 __data__/Saves/dumyl
 create mode 100644 requirements.txt
 delete mode 100644 scr/__pycache__/_data_.cpython-37.pyc
 delete mode 100644 scr/__pycache__/gui.cpython-37.pyc
 delete mode 100644 scr/__pycache__/renderer.cpython-37.pyc
 create mode 100644 scr/demo.py
 create mode 100644 scr/generator.py
 create mode 100644 scr/gui.py
 create mode 100644 scr/renderer.py

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..d5bd4fb
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+venv/
+.idea/
+__pycache__/
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..56ac0a3
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Amith225
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..231f088
--- /dev/null
+++ b/README.md
@@ -0,0 +1,11 @@
+# 3D-ENGINE
+---
+## **CS PROJECT**
+### ***This is a Python library that can be used for 3D modelling and rendering***
+---
+### How To Install
+* #### _Make sure pip is installed on your system; install pip before proceeding if it is not_
+* #### _Download the zip file from [Download v1.0-beta](https://github.com/Amith225/3D-ENGINE/archive/v1.0-beta.zip)_
+* #### _After installing, open cmd inside the 3D-ENGINE folder and run ```pip install -r requirements.txt```_
+* #### _If using a virtual environment, run the command inside it_
+* #### _Finally, to see the demo, run main.py in scr. Use renderer.py as a library_
diff --git a/__data__/Examples/dumy b/__data__/Examples/dumy
new file mode 100644
index 0000000..e69de29
diff --git a/__data__/Saves/decagon b/__data__/Saves/decagon
new file mode 100644
index 0000000000000000000000000000000000000000..5d5f1e85d12bd4ebb3426e7e75402cda71a7e142
GIT binary patch
literal 22
YcmZo*_U7_t^k!@;WPnhN?S)Kw066*uAOHXW

literal 0
HcmV?d00001

diff --git a/__data__/Saves/dumyl b/__data__/Saves/dumyl
new file mode 100644
index 0000000..e69de29
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..250d96c
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1 @@
+numpy==1.19.4
diff --git a/scr/__pycache__/_data_.cpython-37.pyc b/scr/__pycache__/_data_.cpython-37.pyc
deleted file mode 100644
index a8ff44fcd43ce05a47efa8d1080ace78ba7e77cd..0000000000000000000000000000000000000000
Binary files a/scr/__pycache__/_data_.cpython-37.pyc and /dev/null differ
diff --git a/scr/__pycache__/gui.cpython-37.pyc b/scr/__pycache__/gui.cpython-37.pyc
deleted file mode 100644
Binary files a/scr/__pycache__/gui.cpython-37.pyc and /dev/null differ
diff --git a/scr/__pycache__/renderer.cpython-37.pyc b/scr/__pycache__/renderer.cpython-37.pyc
deleted file mode 100644
Binary files a/scr/__pycache__/renderer.cpython-37.pyc and /dev/null differ
diff --git a/scr/demo.py b/scr/demo.py
new file mode 100644
index 0000000..f452b61
--- /dev/null
+++ b/scr/demo.py
@@ -0,0 +1,105 @@
+import pickle
+
+import scr.generator as gn
+import scr.gui as gui
+
+rd = gn.rd
+
+
+class Main(gui.GUI):
+    def __init__(self):
+        gui.GUI.__init__(self, title="3D-ENGINE-Demo")
+        self.model_button.configure(command=lambda: self.model_it())
+        self.load_var.trace('wua', lambda *_: self.load_model(self.load_var.get()))
+        self.rotate = self.rotate_var.get()
+        self.rotate_var.trace('wua', lambda *_: exec("self.rotate=self.rotate_var.get()", {'self': self}))
+
+        self.space = None
+        self.object = None
+        self.camera = None
+        self.light = None
+
+        self.loaded = False
+        self.srz_info = None
+
+    def save_model(self, fname):
+        with open(f'{gui.os.path.dirname(gui.os.getcwd())}/__data__/Saves/{fname}', 'wb') as save_file:
+            pickle.dump(self.srz_info, save_file)
+
+    def load_model(self, fname):
+        with open(f'{gui.os.path.dirname(gui.os.getcwd())}/__data__/{fname}', 'rb') as save_file:
+            data = pickle.load(save_file)
+
+        self.side.delete(0, 'end'), self.radius.delete(0, 'end'), self.separation.delete(0, 'end')
+        self.side.insert(0, data[0])
+        self.radius.insert(0, str(data[1])[1:-1])
+
self.separation.insert(0, str(data[2])[1:-1]) + + self.loaded = True + self.model_it() + + def exec(self, expr): + exec(expr, {'self': self}) + + def key_bind(self): + self.canvas.bind("", lambda event: self.camera.oriental_translation(0, 0.1, 0)) + self.canvas.bind("", lambda event: self.camera.oriental_translation(0, -0.1, 0)) + self.canvas.bind("", lambda event: self.camera.oriental_translation(0.1, 0, 0)) + self.canvas.bind("", lambda event: self.camera.oriental_translation(-0.1, 0, 0)) + self.canvas.bind("", lambda event: self.camera.oriental_translation(0, 0, 0.1)) + self.canvas.bind("", lambda event: self.camera.oriental_translation(0, 0, -0.1)) + self.canvas.bind('w', lambda event: self.camera.oriental_rotation(1, 0, 0)) + self.canvas.bind('s', lambda event: self.camera.oriental_rotation(-1, 0, 0)) + self.canvas.bind('d', lambda event: self.camera.oriental_rotation(0, 1, 0)) + self.canvas.bind('a', lambda event: self.camera.oriental_rotation(0, -1, 0)) + self.canvas.bind('z', lambda event: self.camera.oriental_rotation(0, 0, 1)) + self.canvas.bind('', lambda event: self.camera.oriental_rotation(0, 0, -1)) + + self.canvas.bind("l", lambda event: [exec("light.lum += 1") for light in self.space.lights]) + self.canvas.bind("", lambda event: [exec("light.lum -= 1") for light in self.space.lights]) + self.canvas.bind("c", lambda event: self.exec("self.camera.clarity += 0.1")) + self.canvas.bind("", lambda event: self.exec("self.camera.clarity -= 0.1")) + self.canvas.bind("t", lambda event: self.exec("self.camera.shutter += 0.1")) + self.canvas.bind("", lambda event: self.exec("self.camera.shutter -= 0.1")) + + self.canvas.bind("h", lambda event: self.exec("self.hl='white'")) + self.canvas.bind("", lambda event: self.exec("self.hl=''")) + + def model_it(self): + self.space = rd.Space((self.canvas.winfo_reqwidth(), self.canvas.winfo_height())) + self.srz_info = eval(self.side.get()), eval(self.radius.get()), eval(self.separation.get()) + if not self.loaded: + self.load_button.configure(text='Load') + else: + self.loaded = False + self.object = gn.Spawn.parallelopiped(*self.srz_info) + fov = self.canvas.winfo_width(), self.canvas.winfo_height() + fov = 120 * fov[0] / sum(fov), 180 * fov[1] / sum(fov) + self.camera = rd.Camera(fov=fov, shutter=1, clarity=1) + self.light = rd.Light(360, 33) + self.space.add_object(self.object, location=(0, 0, 10.)) + self.space.add_camera(self.camera, location=(0, 0, 0.), orient=(0, 0, 1.)) + self.space.add_light(self.light) + + self.fov_bar_x.set(self.camera.fov[0]) + self.fov_bar_y.set(self.camera.fov[1]) + self.fov_bar_x.configure(command=lambda event: self.camera.change_fov(self.fov_bar_x.get(), + self.fov_bar_y.get())) + self.fov_bar_y.configure(command=lambda event: self.camera.change_fov(self.fov_bar_x.get(), + self.fov_bar_y.get())) + self.look_through.configure(command=lambda: self.camera.change_thresh(self.look_through_var.get())) + self.save_button.configure(command=lambda: self.save_model(self.save_entry.get()) or self.canvas.focus_set()) + + self.key_bind() + self.canvas.focus_set() + + self.draw_triangles(*self.camera.capture()) + + while 1: + if self.rotate: + self.object.oriental_rotation(0.1, 0.2, 0.5) + self.draw_triangles(*self.camera.capture()) + + +if __name__ == '__main__': + Main().mainloop() diff --git a/scr/generator.py b/scr/generator.py new file mode 100644 index 0000000..e4f219f --- /dev/null +++ b/scr/generator.py @@ -0,0 +1,57 @@ +import scr.renderer as rd + +np = rd.np + + +class Spawn: + @staticmethod + def polygon(s, theta=0, 
z=0, face='both', rtype='object', append_i=0, r=1): + points = [(0, 0, z), + *[(r * np.cos(np.radians(i * 360 / s + theta)), r * np.sin(np.radians(i * 360 / s + theta)), z) for i + in range(s + 1)]] + if face == 'both': + faces = [(append_i, i, i + 1) for i in range(1 + append_i, len(points) - 1 + append_i)] + \ + [(i, i + 1, append_i) for i in range(1 + append_i, len(points) - 1 + append_i)] + elif face == 'front': + faces = [(append_i, i, i + 1) for i in range(1 + append_i, len(points) - 1 + append_i)] + elif face == 'back': + faces = [(i + 1, i, append_i) for i in range(1 + append_i, len(points) - 1 + append_i)] + elif face == 'none': + faces = [] + else: + raise Exception('invalid face value') + + if rtype == 'object': + return rd.Object(points, faces) + else: + return points, faces + + @staticmethod + def parallelopiped(s, r=(1, 1), z=None, theta=0, rtype='object'): + if z is None: + z = [1 for i in r] + points, faces = [], [] + j_append = 0 + z_append = sum(z) + for i in range(len(r)): + z_append -= z[i] + if i == 0: + pointsi, facesi = Spawn.polygon(s, theta, z_append + z[i], 'back', '', len(points), r[i]) + elif i == len(r) - 1: + pointsi, facesi = Spawn.polygon(s, theta, z_append + z[i], 'front', '', len(points), r[i]) + else: + pointsi, facesi = Spawn.polygon(s, theta, z_append + z[i], 'none', '', len(points), r[i]) + + if i == 0: + pass + else: + facesj = [(len(points) + j, j + j_append, len(points) + j + 1) for j in range(1, len(pointsi) - 1)] + \ + [(j + j_append, j + j_append + 1, len(points) + j + 1) for j in range(1, len(pointsi) - 1)] + faces.extend(facesj) + j_append = len(points) + points.extend(pointsi), faces.extend(facesi) + + if rtype == 'object': + return rd.Object(points, faces) + else: + return points, faces diff --git a/scr/gui.py b/scr/gui.py new file mode 100644 index 0000000..01a46e6 --- /dev/null +++ b/scr/gui.py @@ -0,0 +1,121 @@ +import os +import tkinter as tk + + +class GUI(tk.Tk): + def __init__(self, size=(750, 500), title='No Title', **configurations): + self.configurations = configurations + tk.Tk.__init__(self, **self.configurations) + self.resizable(0, 0) + self.size = size + self.title(title) + x, y = (self.winfo_screenwidth() - self.size[0]) // 2, (self.winfo_screenheight() - self.size[1]) // 4 + w, h = self.size[0], self.size[1] + self.geometry(f"{w}x{h}+{x}+{y}") + self.update_idletasks() + + self.canvas = tk.Canvas(self, bg='black') + self.canvas.pack(fill='both', expand=True) + + self._set_input_frame() + + self.bot_frame = tk.Frame(self) + self.model_button = tk.Button(self.bot_frame, text='Model It') + self.model_button.grid(row=0, column=0) + self.save_frame = tk.Frame(self.bot_frame) + self.save_button = tk.Button(self.save_frame, text='Save It') + self.save_button.pack(side='left') + self.save_entry = tk.Entry(self.save_frame) + self.save_entry.pack(side='right', fill='both', expand=True) + self.save_frame.grid(row=0, column=1) + self.load_var = tk.StringVar() + self.load_var.set('Load') + parent = os.path.dirname(os.getcwd()) + '/__data__' + opts = os.listdir(parent) + items = dict([(opt, os.listdir(parent + '/' + opt)) for opt in opts]) + self.load_button = tk.Menubutton(self.bot_frame, textvariable=self.load_var, indicatoron=True, relief='raised', + borderwidth=2) + self.topMenu = tk.Menu(self.load_button, tearoff=False) + self.load_button.configure(menu=self.topMenu) + for key in sorted(items.keys()): + menu = tk.Menu(self.topMenu) + self.topMenu.add_cascade(label=key, menu=menu) + for value in items[key]: + 
menu.add_radiobutton(label=value, variable=self.load_var, value=key+'/'+value) + self.load_button.grid(row=0, column=2) + self.bot_frame.pack() + + self.hl = '' + + self.canvas.update_idletasks() + self.add_x, self.add_y = self.canvas.winfo_width() / 2, self.canvas.winfo_height() / 2 + + def _set_input_frame(self): + self.input_frame = tk.Frame(self) + self.right_frame = tk.Frame(self.input_frame) + self.left_frame = tk.Frame(self.input_frame) + + self.fov_bar_x = tk.Scale(self.right_frame, from_=0, to=360, length=180, orient='horizontal') + self.fov_bar_x.grid(row=0, column=1) + self.fov_bar_x_text = tk.Label(self.right_frame, text='Fov X:') + self.fov_bar_x_text.grid(row=0, column=0, sticky='n') + self.fov_bar_y = tk.Scale(self.right_frame, from_=0, to=360, length=180, orient='horizontal') + self.fov_bar_y.grid(row=1, column=1) + self.fov_bar_y_text = tk.Label(self.right_frame, text='Fov Y:') + self.fov_bar_y_text.grid(row=1, column=0, sticky='n') + + self.side = tk.Entry(self.left_frame) + self.side.grid(row=0, column=1) + self.side_text = tk.Label(self.left_frame, text='Side:') + self.side_text.grid(row=0, column=0, sticky='e') + self.radius = tk.Entry(self.left_frame) + self.radius.grid(row=1, column=1) + self.radius_text = tk.Label(self.left_frame, text='Radius:') + self.radius_text.grid(row=1, column=0, sticky='e') + self.separation = tk.Entry(self.left_frame) + self.separation.grid(row=2, column=1) + self.separation_text = tk.Label(self.left_frame, text='Separation:') + self.separation_text.grid(row=2, column=0, sticky='e') + self.look_through_var = tk.IntVar() + self.look_through_var.set(0) + self.look_through = tk.Checkbutton(self.left_frame, text='Look Through', variable=self.look_through_var) + self.look_through.grid(row=3, column=1) + self.rotate_var = tk.IntVar() + self.rotate_var.set(0) + self.rotate_button = tk.Checkbutton(self.left_frame, text='Rotate', variable=self.rotate_var) + self.rotate_button.grid(row=3, column=0) + + self.right_frame.pack(side='right') + self.left_frame.pack(side='left') + self.input_frame.pack() + + def draw_triangles(self, points_cluster, face_cluster, draw_orient=None, color=None): + if color is None: + color = self.winfo_rgb('white') + color = color[0] / 256, color[1] / 256, color[2] / 256 + self.canvas.delete('all') + for face in face_cluster: + face, shade = face[0], face[1] + p1, p2, p3 = points_cluster[face[0]], points_cluster[face[1]], points_cluster[face[2]] + col = '%02x%02x%02x' % (int(shade * color[0]), int(shade * color[1]), int(shade * color[2])) + col = '#' + col + self.canvas.create_polygon(p1[0][0] + self.add_x, p1[1][0] + self.add_y, + p2[0][0] + self.add_x, p2[1][0] + self.add_y, + p3[0][0] + self.add_x, p3[1][0] + self.add_y, + outline=self.hl, fill=col) + + if draw_orient: + self.draw_orient(draw_orient) + + self.canvas.update() + + def draw_orient(self, orient_cluster): + for orient in orient_cluster: + f, u, r, o = orient + + self.canvas.create_line(o[0][0] + self.add_x, o[1][0] + self.add_y, o[0][0] + f[0][0] + self.add_x, + o[1][0] + f[1][0] + self.add_y, fill='white') + self.canvas.create_line(o[0][0] + self.add_x, o[1][0] + self.add_y, o[0][0] + u[0][0] + self.add_x, + o[1][0] + u[1][0] + self.add_y, fill='red') + self.canvas.create_line(o[0][0] + self.add_x, o[1][0] + self.add_y, o[0][0] + r[0][0] + self.add_x, + o[1][0] + r[1][0] + self.add_y, fill='green') diff --git a/scr/renderer.py b/scr/renderer.py new file mode 100644 index 0000000..9d3259f --- /dev/null +++ b/scr/renderer.py @@ -0,0 +1,267 @@ +import numpy as np 
+ + +class Space: + def __init__(self, screen=(500, 500), unit=250): + self.screen = screen + self.unit = unit + + self.objects = [] + self.cameras = [] + self.lights = [] + + def add_camera(self, camera, location=(0, 0, 0), orient=(0, 0, 1)): + camera.place(self, location, orient) + self.cameras.append(camera) + + def add_object(self, object, location=(0, 0, 0)): + object.place(self, location) + self.objects.append(object) + + def add_light(self, light, location=(0, 0, 0), orient=(0, 0, 0)): + light.place(location, orient) + self.lights.append(light) + + +class Camera: + def __init__(self, fov=(103, 77), z_far=100, z_near=1, shutter=1, clarity=2): + self.fov = fov + self.fov_cos = np.cos(np.radians(max(self.fov)) / 2) + self.fov_tan = np.tan(np.radians(self.fov) / 4) + self.z_far = z_far + self.z_near = z_near + self.shutter = shutter + self.clarity = clarity + self.thresh = 'doti > 0' + + self.space = None + self.location = None + + self.projection_matrix = None + self.camera_matrix = None + + self.forward = None + self.up = None + self.right = None + + def change_thresh(self, val): + if val == 0: + self.thresh = 'doti > 0' + else: + self.thresh = 'doti <= 0' + + def change_fov(self, new_fov_x, new_fov_y): + self.fov = new_fov_x, new_fov_y + self.fov_cos = np.cos(np.radians(max(self.fov)) / 2) + self.fov_tan = np.tan(np.radians(self.fov) / 4) + + a = self.space.screen[1] / self.space.screen[0] # aspect ratio - screen height / screen width + z = 1 / (self.z_far - self.z_near) # pre-calculated value of z factor + # the projection matrix converts 3d points to 2d point(projected on to the screen) + # as would be seen from the screen + self.projection_matrix = np.array([[-2 / self.fov_tan[0] / a, 0, 0, 0], + [0, 2 / self.fov_tan[1], 0, 0], + [0, 0, -(self.z_far + self.z_near) * z, -1], + [0, 0, -2 * z * self.z_far * self.z_near, 0]]) + + def place(self, space, location, orient=(0, 0, 1)): + self.location = np.array([*location, 1]).reshape((4, 1)) + self.space = space + + # this is the forward direction, the direction the camera will look at initially + self.forward = np.array([*orient, 0]).reshape((4, 1)) + self.forward = self.forward / np.linalg.norm(self.forward) + # this is the up direction + self.up = np.append(np.cross(self.forward[:3], self.forward[:3] + [[1], [0], [0]], axis=0), 0).reshape((4, 1)) + # if the cross product turned out to be zero, retry with another initialization + if self.up.all(0): + self.up = np.append(np.cross(self.forward[:3], self.forward[:3] + [[0], [1], [0]], axis=0), + 0).reshape((4, 1)) + self.up = self.up / np.linalg.norm(self.up) + # this is the right direction + self.right = np.append(np.cross(self.up[:3], self.forward[:3], axis=0), 0).reshape((4, 1)) + + a = self.space.screen[1] / self.space.screen[0] # aspect ratio - screen height / screen width + z = 1 / (self.z_far - self.z_near) # pre-calculated value of z factor + # the projection matrix converts 3d points to 2d point(projected on to the screen) + # as would be seen from the screen + self.projection_matrix = np.array([[-2 / self.fov_tan[0] / a, 0, 0, 0], + [0, 2 / self.fov_tan[1], 0, 0], + [0, 0, -(self.z_far + self.z_near) * z, -1], + [0, 0, -2 * z * self.z_far * self.z_near, 0]]) + # the matrix which changes the projection based on the current orientation of the camera + self.camera_matrix = np.array([[1, 0, 0, 0], + [0, 1, 0, 0], + [0, 0, 1, 0], + [0, 0, 0, 1]]) + + # changes the orientation and in turn the camera matrix, given right(x), up(y), forward(z) angles to rotate + # initially rotated by 0, 
0, 0 + self.oriental_rotation(0, 0, 0) + + def capture(self): + points_cluster = [] + faces_cluster = [] + for obj in self.space.objects: + faces = [] + point_indexes = [] + p1i, p2i, p3i = obj.vectors[obj.faces.transpose()] + side1i, side2i = p1i[:, :3] - [p2i[:, :3], p3i[:, :3]] + normal_i = np.cross(side1i, side2i, axis=1) + midi = (p1i + p2i + p3i) / 3 + obj.location + cam_prospect_i = (midi - self.location)[:, :3] + forward_prospect_i = np.einsum('ij,lik->lk', self.forward[:3], cam_prospect_i) + doti = np.einsum('lij,lik->lk', normal_i, cam_prospect_i) + z_buffer_i = np.linalg.norm(cam_prospect_i, axis=1) + + fov_val = z_buffer_i * self.fov_cos + visible_indices = ((eval(self.thresh)) & (forward_prospect_i > fov_val) & + (z_buffer_i > self.z_near) & (z_buffer_i < self.z_far)).transpose()[0] + visible_faces = obj.faces[visible_indices] + z_buffer_i = z_buffer_i[visible_indices] + midi = midi[visible_indices] + + light_prospect_i = self.shutter * np.array([light.luminate(midi) + for light in self.space.lights]).sum(axis=0) + light_prospect_i[light_prospect_i > 1] = 1 + + for fi in range(len(visible_faces)): + face = visible_faces[fi] + point_indexes.extend([f for f in face if f not in point_indexes]) + faces.append([[point_indexes.index(p) + len(points_cluster) for p in face], + light_prospect_i[fi][0], z_buffer_i[fi]]) + + points = np.einsum('ij,jk,lkm->lim', + self.projection_matrix, + self.camera_matrix, + obj.vectors[point_indexes] + obj.location - self.location) + points *= self.space.unit / points[:, 3, np.newaxis] + points_cluster.extend(points) + faces_cluster.extend(faces) + + faces_cluster = sorted(faces_cluster, key=lambda x: x[2], reverse=True) + + return points_cluster, faces_cluster + + def oriental_rotation(self, r, u, f): + angles = np.radians([r, u, f]) + c, s = np.cos(angles), np.sin(angles) + self.forward_rotate(c[2], s[2]), self.right_rotate(c[0], s[0]), self.up_rotate(c[1], s[1]) + + rotation_matrix = [self.right.transpose()[0], + self.up.transpose()[0], + self.forward.transpose()[0], + [0, 0, 0, 1]] + + self.camera_matrix = rotation_matrix + + def forward_rotate(self, c, s): + self.up, self.right = self.up * c + self.right * s, self.right * c - self.up * s + + def right_rotate(self, c, s): + self.forward, self.up = self.forward * c + self.up * s, self.up * c - self.forward * s + + def up_rotate(self, c, s): + self.forward, self.right = self.forward * c + self.right * s, self.right * c - self.forward * s + + def oriental_translation(self, r, u, f): + self.forward_translate(f), self.right_translate(r), self.up_translate(u) + + def forward_translate(self, m): + self.location += m * self.forward + + def right_translate(self, m): + self.location += m * self.right + + def up_translate(self, m): + self.location += m * self.up + + +class Object: + def __init__(self, vectors, faces): + vectors = [[*v, 1] for v in vectors] + self.vectors = np.array(vectors, dtype=np.float64).reshape((len(vectors), 4, 1)) + self.initial_vectors = np.array(self.vectors) + + center = np.mean(self.vectors, axis=0) + center[3][0] = 0 + + self.vectors = self.vectors - center + self.initial_vectors = self.initial_vectors - center + self.faces = np.array(faces) + + self.space = None + self.location = None + + self.forward = None + self.up = None + self.right = None + + def place(self, space, location, orient=(0, 0, 1)): + self.location = np.array(list(location) + [0]).reshape((4, 1)) + self.space = space + + # this is the forward direction, the direction the camera will look at initially + 
self.forward = np.array([*orient, 0]).reshape((4, 1)) + self.forward = self.forward / np.linalg.norm(self.forward) + # this is the up direction + self.up = np.append(np.cross(self.forward[:3], self.forward[:3] + [[1], [0], [0]], axis=0), 0).reshape((4, 1)) + # if the cross product turned out to be zero, retry with another initialization + if self.up.all(0): + self.up = np.append(np.cross(self.forward[:3], self.forward[:3] + [[0], [1], [0]], axis=0), + 0).reshape((4, 1)) + self.up = self.up / np.linalg.norm(self.up) + # this is the right direction + self.right = np.append(np.cross(self.up[:3], self.forward[:3], axis=0), 0).reshape((4, 1)) + + def oriental_rotation(self, r, u, f): + angles = np.radians([r, u, f]) + c, s = np.cos(angles), np.sin(angles) + self.forward_rotate(c[2], s[2]), self.right_rotate(c[0], s[0]), self.up_rotate(c[1], s[1]) + + rotation_matrix = [self.right.transpose()[0], + self.up.transpose()[0], + self.forward.transpose()[0], + [0, 0, 0, 1]] + + self.vectors = np.einsum('ij,ljk->lik', rotation_matrix, self.initial_vectors) + + def forward_rotate(self, c, s): + self.up, self.right = self.up * c + self.right * s, self.right * c - self.up * s + + def right_rotate(self, c, s): + self.forward, self.up = self.forward * c + self.up * s, self.up * c - self.forward * s + + def up_rotate(self, c, s): + self.forward, self.right = self.forward * c + self.right * s, self.right * c - self.forward * s + + def oriental_translation(self, r, u, f): + self.forward_translate(f), self.right_translate(r), self.up_translate(u) + + def forward_translate(self, m): + self.location += m * self.forward + + def right_translate(self, m): + self.location += m * self.right + + def up_translate(self, m): + self.location += m * self.up + + +class Light: + def __init__(self, alpha, lum): + self.alpha = alpha + self.lum = lum + + self.location = None + self.orient = None + + def place(self, location, orient): + self.location = np.array([*location, 0]).reshape((4, 1)) + self.orient = np.array([*orient, 0]).reshape((4, 1)) + + def luminate(self, midi): + d = (midi - self.location) ** 2 + d = d.sum(axis=1) ** 0.5 / self.lum + + return d
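
The README suggests using renderer.py as a library on its own. The sketch below (not part of the patch) illustrates that workflow under stated assumptions: it builds a prism with generator.Spawn, registers it in a Space together with a Camera and a Light, and pulls one projected frame out of Camera.capture(). It assumes the repository root is on the import path so the `scr` package resolves and that numpy is installed; the shape values are arbitrary example numbers, while the camera and light settings mirror the Camera defaults and the Light used in demo.py.

```python
# Minimal library-style use of scr.renderer / scr.generator (sketch).
# Assumes the repository root is on sys.path so `scr` imports, and numpy is installed.
import scr.generator as gn
import scr.renderer as rd

# A square prism: 4 sides, unit radii at both ends, separated along z.
# These shape values are arbitrary example numbers.
prism = gn.Spawn.parallelopiped(4, r=(1.0, 1.0), z=(2.0, 2.0))

space = rd.Space(screen=(500, 500))                       # virtual screen the demo canvas maps to
camera = rd.Camera(fov=(103, 77), shutter=1, clarity=1)   # default Camera field of view
light = rd.Light(360, 33)                                  # same light settings as demo.py

# Register everything with the space; the object sits 10 units in front of the camera.
space.add_object(prism, location=(0, 0, 10.0))
space.add_camera(camera, location=(0, 0, 0.0), orient=(0, 0, 1.0))
space.add_light(light)

# capture() projects the scene: it returns the 2D point cluster and the visible,
# shaded faces already sorted back-to-front (what gui.GUI.draw_triangles expects).
points, faces = camera.capture()
print(len(points), "projected points,", len(faces), "visible faces")

# Rotating the object (or translating/rotating the camera) and capturing again
# yields the next animation frame.
prism.oriental_rotation(0.1, 0.2, 0.5)
points, faces = camera.capture()
```

demo.py drives essentially this loop, but hands each captured frame to gui.GUI.draw_triangles for drawing on the Tkinter canvas and adds the keyboard bindings from key_bind.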