From 93c8c8851b360160c5325d6222c175af81e06a6e Mon Sep 17 00:00:00 2001
From: monologg
Date: Sun, 7 Nov 2021 14:08:22 +0000
Subject: [PATCH] feat: initial release for code & model

Co-authored-by: donggyukimc
---
 .editorconfig | 24 +
 .github/.stale.yml | 17 +
 .github/ISSUE_TEMPLATE/bug_report.md | 30 +
 .github/ISSUE_TEMPLATE/config.yml | 3 +
 .github/ISSUE_TEMPLATE/feature_request.md | 15 +
 .github/ISSUE_TEMPLATE/question.md | 25 +
 .github/PULL_REQUEST_TEMPLATE.md | 7 +
 .github/images/kobigbird-logo.png | Bin 0 -> 13209 bytes
 .github/images/memory-usage.png | Bin 0 -> 54303 bytes
 .github/images/sparse-attention.png | Bin 0 -> 46848 bytes
 .github/images/time-usage.png | Bin 0 -> 49567 bytes
 .github/release-drafter.yml | 21 +
 .github/workflows/linter.yml | 32 +
 .github/workflows/release-drafter.yml | 20 +
 .gitignore | 135 +
 .gitmessage | 16 +
 LICENSE | 201 +
 Makefile | 33 +
 README.md | 122 +
 README_EN.md | 122 +
 docs/download_tfv1_ckpt.md | 9 +
 docs/gpu_benchmark.md | 32 +
 docs/short_seq_evaluation_en.md | 50 +
 docs/short_seq_evaluation_ko.md | 50 +
 finetune/.gitignore | 5 +
 finetune/README.md | 78 +
 finetune/README_EN.md | 78 +
 finetune/data/__init__.py | 1 +
 finetune/data/cls.py | 147 +
 finetune/data/common.py | 188 +
 finetune/data/qa.py | 581 +
 finetune/download_qa_dataset.sh | 48 +
 finetune/evaluate/__init__.py | 4 +
 finetune/evaluate/cls.py | 33 +
 finetune/evaluate/qa.py | 593 +
 finetune/model/__init__.py | 4 +
 finetune/model/base.py | 87 +
 finetune/model/cls.py | 47 +
 finetune/model/qa.py | 77 +
 finetune/requirements.txt | 7 +
 finetune/run.py | 259 +
 finetune/scripts/run_fake_news.sh | 20 +
 finetune/scripts/run_fake_news_short.sh | 20 +
 finetune/scripts/run_korquad_2.sh | 21 +
 finetune/scripts/run_korquad_2_short.sh | 21 +
 finetune/scripts/run_modu_sentiment.sh | 20 +
 finetune/scripts/run_modu_sentiment_short.sh | 20 +
 finetune/scripts/run_tydiqa.sh | 22 +
 finetune/scripts/run_tydiqa_short.sh | 22 +
 pretrain/.gitignore | 6 +
 pretrain/README.md | 156 +
 pretrain/README_EN.md | 156 +
 pretrain/convert_bigbird_tf_to_pt.py | 161 +
 pretrain/create_pretraining_data.py | 698 +
 pretrain/kobigbird/__init__.py | 0
 pretrain/kobigbird/attention.py | 1062 +
 pretrain/kobigbird/beam_search.py | 242 +
 pretrain/kobigbird/decoder.py | 671 +
 pretrain/kobigbird/encoder.py | 540 +
 pretrain/kobigbird/flags.py | 310 +
 pretrain/kobigbird/modeling.py | 470 +
 pretrain/kobigbird/optimization.py | 273 +
 pretrain/kobigbird/recompute_grad.py | 526 +
 pretrain/kobigbird/training_utils.py | 90 +
 pretrain/kobigbird/utils.py | 786 +
 pretrain/requirements.txt | 4 +
 pretrain/run_pretraining.py | 529 +
 pretrain/scripts/base_size_tpu.sh | 60 +
 pretrain/scripts/build_tfrecord.sh | 36 +
 pretrain/tokenizer/special_tokens_map.json | 9 +
 pretrain/tokenizer/tokenizer_config.json | 16 +
 pretrain/tokenizer/vocab.txt | 32500 +++++++++++++++++
 pyproject.toml | 30 +
 setup.cfg | 3 +
 74 files changed, 42701 insertions(+)
 create mode 100644 .editorconfig
 create mode 100644 .github/.stale.yml
 create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md
 create mode 100644 .github/ISSUE_TEMPLATE/config.yml
 create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md
 create mode 100644 .github/ISSUE_TEMPLATE/question.md
 create mode 100644 .github/PULL_REQUEST_TEMPLATE.md
 create mode 100644 .github/images/kobigbird-logo.png
 create mode 100644 .github/images/memory-usage.png
 create mode 100644 .github/images/sparse-attention.png
 create mode 100644 .github/images/time-usage.png
 create mode 100644 .github/release-drafter.yml
 create mode 100644 .github/workflows/linter.yml
 create mode 100644 .github/workflows/release-drafter.yml
 create mode 100644 .gitignore
 create mode 100644 .gitmessage
 create mode 100644 LICENSE
 create mode 100644 Makefile
 create mode 100644 README.md
 create mode 100644 README_EN.md
 create mode 100644 docs/download_tfv1_ckpt.md
 create mode 100644 docs/gpu_benchmark.md
 create mode 100644 docs/short_seq_evaluation_en.md
 create mode 100644 docs/short_seq_evaluation_ko.md
 create mode 100644 finetune/.gitignore
 create mode 100644 finetune/README.md
 create mode 100644 finetune/README_EN.md
 create mode 100644 finetune/data/__init__.py
 create mode 100644 finetune/data/cls.py
 create mode 100644 finetune/data/common.py
 create mode 100644 finetune/data/qa.py
 create mode 100755 finetune/download_qa_dataset.sh
 create mode 100644 finetune/evaluate/__init__.py
 create mode 100644 finetune/evaluate/cls.py
 create mode 100644 finetune/evaluate/qa.py
 create mode 100644 finetune/model/__init__.py
 create mode 100644 finetune/model/base.py
 create mode 100644 finetune/model/cls.py
 create mode 100644 finetune/model/qa.py
 create mode 100644 finetune/requirements.txt
 create mode 100644 finetune/run.py
 create mode 100644 finetune/scripts/run_fake_news.sh
 create mode 100644 finetune/scripts/run_fake_news_short.sh
 create mode 100644 finetune/scripts/run_korquad_2.sh
 create mode 100644 finetune/scripts/run_korquad_2_short.sh
 create mode 100644 finetune/scripts/run_modu_sentiment.sh
 create mode 100644 finetune/scripts/run_modu_sentiment_short.sh
 create mode 100644 finetune/scripts/run_tydiqa.sh
 create mode 100644 finetune/scripts/run_tydiqa_short.sh
 create mode 100644 pretrain/.gitignore
 create mode 100644 pretrain/README.md
 create mode 100644 pretrain/README_EN.md
 create mode 100644 pretrain/convert_bigbird_tf_to_pt.py
 create mode 100644 pretrain/create_pretraining_data.py
 create mode 100644 pretrain/kobigbird/__init__.py
 create mode 100644 pretrain/kobigbird/attention.py
 create mode 100644 pretrain/kobigbird/beam_search.py
 create mode 100644 pretrain/kobigbird/decoder.py
 create mode 100644 pretrain/kobigbird/encoder.py
 create mode 100644 pretrain/kobigbird/flags.py
 create mode 100644 pretrain/kobigbird/modeling.py
 create mode 100644 pretrain/kobigbird/optimization.py
 create mode 100644 pretrain/kobigbird/recompute_grad.py
 create mode 100644 pretrain/kobigbird/training_utils.py
 create mode 100644 pretrain/kobigbird/utils.py
 create mode 100644 pretrain/requirements.txt
 create mode 100644 pretrain/run_pretraining.py
 create mode 100644 pretrain/scripts/base_size_tpu.sh
 create mode 100644 pretrain/scripts/build_tfrecord.sh
 create mode 100644 pretrain/tokenizer/special_tokens_map.json
 create mode 100644 pretrain/tokenizer/tokenizer_config.json
 create mode 100644 pretrain/tokenizer/vocab.txt
 create mode 100644 pyproject.toml
 create mode 100644 setup.cfg

diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..7f578f1
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,24 @@
+# Check http://editorconfig.org for more information
+# This is the main config file for this project:
+root = true
+
+[*]
+charset = utf-8
+end_of_line = lf
+insert_final_newline = true
+indent_style = space
+indent_size = 2
+trim_trailing_whitespace = true
+
+[*.{py, pyi}]
+indent_style = space
+indent_size = 4
+
+[Makefile]
+indent_style = tab
+
+[*.md]
+trim_trailing_whitespace = false
+
+[*.{diff,patch}]
+trim_trailing_whitespace = false
diff --git a/.github/.stale.yml b/.github/.stale.yml
new file mode 100644
index 0000000..dc90e5a
--- /dev/null
+++ b/.github/.stale.yml
@@ -0,0 +1,17 @@
+# Number of days of inactivity before an issue becomes stale
+daysUntilStale: 60
+# Number of days of inactivity before a stale issue is closed
+daysUntilClose: 7
+# Issues with these labels will never be considered stale
+exemptLabels:
+  - pinned
+  - security
+# Label to use when marking an issue as stale
+staleLabel: wontfix
+# Comment to post when marking an issue as stale. Set to `false` to disable
+markComment: >
+  This issue has been automatically marked as stale because it has not had
+  recent activity. It will be closed if no further activity occurs. Thank you
+  for your contributions.
+# Comment to post when closing a stale issue. Set to `false` to disable
+closeComment: false
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 0000000..036bffc
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,30 @@
+---
+name: 🐛 Bug report
+about: If something isn't working 🔧
+title: ""
+labels: bug
+assignees:
+---
+
+## 🐛 Bug Report
+
+
+
+## 🔬 How To Reproduce
+
+Steps to reproduce the behavior:
+
+1. ...
+
+### Environment
+
+- OS: [e.g. Linux / Windows / macOS]
+- Python version, get it with:
+
+```bash
+python --version
+```
+
+## 📎 Additional context
+
+
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 0000000..8f2da54
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,3 @@
+# Configuration: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository
+
+blank_issues_enabled: false
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 0000000..7ce8c12
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,15 @@
+---
+name: 🚀 Feature request
+about: Suggest an idea for this project 🏖
+title: ""
+labels: enhancement
+assignees:
+---
+
+## 🚀 Feature Request
+
+
+
+## 📎 Additional context
+
+
diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md
new file mode 100644
index 0000000..b2ca0d9
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/question.md
@@ -0,0 +1,25 @@
+---
+name: ❓ Question
+about: Ask a question about this project 🎓
+title: ""
+labels: question
+assignees:
+---
+
+## Checklist
+
+
+
+- [ ] I've searched the project's [`issues`](https://github.com/monologg/KoBigBird/issues?q=is%3Aissue)
+
+## ❓ Question
+
+
+
+How can I [...]?
+
+Is it possible to [...]?
+
+## 📎 Additional context
+
+
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000..4dab74c
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,7 @@
+## Description
+
+
+
+## Related Issue
+
+
diff --git a/.github/images/kobigbird-logo.png b/.github/images/kobigbird-logo.png
new file mode 100644
index 0000000000000000000000000000000000000000..6244643d5d3c0a5f759872be1b797e1a85879399
GIT binary patch
literal 13209
[base85-encoded binary PNG data omitted]
z>HFr@U;I)`lLX*DGr%!>1CVSw%>S`B^VxV(vV=i(PmB;ujmGSDl{18AF#@#p9D>m) zMHk{&mq6nkg;ZV#CS&f;y^RsQDO8L{ zkff1`iUkCJZ8*e>yvU3GUg;kW>)Y``aznsdcjSU_TOHQA>iGej^HVXf+I7YliKvU- zuapjni1u9q+NUV-Uv&jqxIK75D+%h#4kdkJxBJ|(zo5MTWYyw}5Z+jVK2SXhp7>n8Vlo?-B^)aCMe4goZ zQzZV@L#r@nN`S#^8`6%GC^l@i*x<4_^(t4fer7Bj;Y~!D2|O6V?STRB(5-n~7|@Vh z67>mQg=dKCVTs+@%bv}8qPD*jWvcE4jPU)lL@t0fcO8&tvAM}{-$V=@{qNAu4u|Oq z>3za(Mo9mKTv|5Ka+LC#PbWdYB60gF-(36-vlo)*NS4G{kvhGb%HnMVmW`5bWmyvH z!Kq}@X%nDSHvSXm z2JihfRffF2}2GoP^QfqJ}`~X`Wg)xI@Kx`F07`Vi~59%_A?9H zv)YYNlbFyss`Vcq?iXq0(BR!?6xzfQze%<4IATOnB}qa~+FCO)*3i(WU<8fe zHqw5=m#_0IhBJl|^9f(cVXkY)zgS_gBj%>n+h*9f&~Ax&CNigwhSv&&HYj~}k=3d# z*%JiL>hrV&8LFiQ`|(9^;6Us59%r6cawIt?q0YfB2&=MlO+8db9j7HhuTOir5Te+Q zO|kPX7ghcp2q z?&uUI)mMo`f+~$t3+E)yS!VW3>NOnh_w1UEL&g#0+?hDxQczPr*AErUZT`HEjuJSm zYaJ9i9}=?4=Ro*Ug40S0^?UDNly&sDeTE|_`Wr?9a?i)a@8&H9VU&p=$uJMj5itx! ziky?~F;tH+cTY1idcA4n8KVPqMJ~9)V}V^lSF~6_qY!4^boSqjn9A5 z6^%tqPUk61lv@OYPOoHY`GuiAv8}ViBJ{D6@$1NdMbT?I(Od%HrfSOsxXxXCs~I?A zy7vH9%LL^+g4fj4BQ?29cJNvmh$?D(!4@z&6CthJZfQEKL+?NUSKP z-}CG;^@Dtx)6iXu7n*1A#!zyaZ3Ck{;4cxd_zX^x)$8a1!bJ||qU+P@Q4L6YD96dT z;$YtTbR@8pTx1q9A-0uHr!JZp!P-AfZokV2GT$q{*%3B~B5gTMc_l$_4tTv!Up@^5 z&J_3*fG|xCgVCSRMHU;JKZH{oBU!Ub8oJ`_4Ht%hL9|8{O1_GyL$_F<3xPXs^A42` z0h4keIgEy1RbwT^0`qf;mDdk4QKa6D@f%i;tq*C3!$ox66khoYIN=*c87rr-D}d@{ ze#645ed^Ys!KFj%;{kPJl@o?X1?Dfr%J1!&tgMCwpQQaFSM zY)q}0stUSKfT7Vs|2JbA61>6y2etYU9Vm$^o%8RNeh=UnprZTlglXsy(u8mCd$B*u zh9Sz`n}85TB=~QV_MDwSlL!5J zT^g1_dqgD_X!LJeXmGMJSpaV}ot|U(_Y2mWn?0!4MwNMcJ68Mvb{J_3x8m0;DH6bb z4*~ofjq2!H;81y^$%?<3680*WLM?{U6AJu}}Qm!j^|^oe$`tY;*vwF;0VqDU`leq41(|t0d!R&%K_|!CD~VOXgHkuyOkO>oidDIvbJ~yHUFSBIT;gK zOr7f8B|lu(KwGe1JfD;Bx5_A$-R{K-T5?z(IBImb}5`AH=G_-I44(XHou`jc0P@SY%%~Xt+ zC+@L>AAblBtL>}tD?*JzWqtK;zC<$XtK4$sA1teF6w)zyW_OSKQAO!}gHn?34pnS& zbNQK3e_BzNKY?K%<4X;0$xABwTGiMTjI}_Ha1oWRJp#m_=EeE-r%SU<}VJ!*v?`cZKBx+WaOF0GA6&poJ(qUUW#LorW`ba*01Kf5=XM zSrKgJ`}XPZV!@oV4`nxs&3x}FCDKMxVP_w)KL9P(UPcIK0hAR3P>DS0D5kQ32x9S3 zC{WsqSDN+CMNuNhxkkp_l{Xk>eHeT_YIbe``|3Z4XQ9|UJ{IxHoS{6xivdN-UqX<7 zytuSTD=C>C^2Pm{fGgVpk8jKFdZGyBa$k0m26$52`!F~z5P0ta_WyXVvpQ?0sMK5G zv)IZAx9jy@38Nuqg)5)mW z>rAecRp@@S@66^R88us4>NQMn;3~S;tXeO~)IzHK9`KivCq+`rK_Uv&1>*mE2u2k0 zh*X~bG`)YR_ZL9mw-_SVsk1jmeeuaqS+D8^WXY|2N^c|aK(T@K;5F!}|m^}+^lx|r= zL7>gCAqNy3jv756cgA?F^vMz`MY;6)N_4z zvLKn=hguvklW}m!Gc;=SJk7sgy+g00K>%C60FW{zF-dCXuXW^8j8(PG`rXQ;G$Gh*f!iyPM8FnaDusV)D*-0TN)aR901~ zJq12qdK;cB0@2cW{}Q@fy=9xe9++>wJ=|N!iA~59u0>y(UIYSN^;0@9g_MAB)7?$i zp)@{rD%RbyozULbfOTT_eULX^TkBw1%>Bs7ax^>G8VFCW+DOt>XmmX=8_kxM@KgZ( zX*yM&1iyaj+2odpLA;6@D-ffT(!fGs_6u=;6TJu2ha3P$2z zIP)o&-F=eyG>Dyrp8z3;HYkL2&H80Gee_@zk#mjY^ncNi)UpNp4h+Y``mH`R_9o!x z4d5%h#-69TUW#yeWp=fZw~Vr4CI@GppG(eKay~6kL{itMsxrV@@j6N^s}6$wX}Z#? z!|#d_Wu4SlQyPwh1!{P?;1m-@%F{d3 z0QaW;XCPAia}+VBBylyABR6J5G?pE)>MzMlF`8QfD4L9KsIwjj4b~^k{Q(S+Qi;Fy zE&($2&X~}b=mQOIe-2NAg(ZlU1vc=n0mFO)pmlg*#J^;MoffQsxXk=mUWj9wO-9 z2M{K9Df!_kaO+TLxrKgS0i23oj~ZrGyK2{{|V{vUFP8!oYtkd;uJAMIzy*d zM@77Zu2D9V3Y(YPRLt6?VFBZZ1Po6rnm?hypUx+L$`}JegcQi(G=$Yx0VE&^(runq zD{iM406|dHjb9scm~&_u`x!%bmRedGA(qCV?)TXNi2?rTCZR#&8>EC%EWyYxWBoDq zVo`HHdR0zAjp4%1H-OnczI-V68C!L|s35};9%}Oyj+6Qo05g`dB;bD@L<5s=j}#K? 
zFe9IizQKhf0}~d9b4Zha(#%GFrKf;zO_`P=a#rW3y12Yx@|viv#L^4<-6qc|ZOL5J znD2ZljJ5izwq~*NMDY^H|5tXC{NklA;B47fGlu7Z+)~ETi_s0u!jjo?@3QIuR{Y>k ze#e~E1h(SQkd0w8#meAgQ_Lspxp0&Su5au}uHCOkT|<(DTo5jZo6y(rY0l%}8YNVW zj+8C10YB=EnbSbTH(4k%N$L4R099eqYT%G$eGc={u?~l`ZZ1`+aRQYA(I>Mx$ALhO zcVN8T(Pv!uhm<6x&fHHwhKc>=W;>6i?1;hDt_4|G_we*EBDv%1Cwk7h;@0sJn?qHE za_ZS|y6j#mFY3*7tl#r`F6P{@Do$9At2xM^cO34WWQb`YJ8au{XE2sTr5`tRX1psS zYq1g|xTmKrL6ij~jS{aU`zornZM|RDJ-B{u`!tJ`(iSL6WpZNfG+;3@WFo_2RDju3 zL}gKwtJha15)zhQmR54w%|)EM2`HsaB!IsRMrkf-2l>nxafH+Gu%iwxK0cc^RbTaa zfw4ufp>FLm`Z=%UqIf)BwVl!xRQvqqw6EUlX^+L*?lEnNQ^OX*E%ipu8*e>CDL}Nw zW!E30c_1n&L0o z)%8wYM-qzVUz8xV_j3wIEK}UW?5vex9ky6AN!ldFmRVF_VJamixWFSo)GoGJ+qnDkwQ1 zRM>gA+{SaRtRpf6OHb`gtCL#k({<#$nA)uaS2u+W3@BP&h>MFGw9H5df5DNYBxKUs z&I5BddLNy#>h1J4wtC&*@|f(LT&@o>{4E~n%c+>W^7PN8@We~|Yg5!i+vD1^*?W+; z^>9i}-z_AcLbgn~;LFfEOT${`xaLc2iNbvzZYjcMh0R0I+LU0{@ZvXq&n9NwYVgs; zh2XQv0Ten>)h8qI5wwkd(U;Hd1PiD0rM{ty$lSn&d0o zYPfA?ar{kwAuGC-(C>ND^ECAQhw~`{U&u-y`x|k01V;7U?^DA4qgp!-;?YfB7NH(N zKN2#cmpZuF;1+OWk%-nT2d3CRNqWt!0kPw+^8B{GT2Mn_#@C+OwN?a5wBvuMQZxpc zZh=V{QTET^A~*Hg9bA4#yBcr~}4m<(6_u(M;2mlBLBfu4-w zth6^cEz`B;h9^}mjn1g?M|r^c*Ng%;fAyT33!|>QM}}lgJFXcS9f{BHNajVGLVP2*r=+CxFsq_s`J7p` z>xT$PKk}xR#~Cgbdl>z_|3A(Rv>sZ#W|M5v>O*Z|VWA13sa@019>5rhii!?ms)*OZ zvT}PZ;&^8hx_jn-#bQmD{;do61yp_rlmhUiYzwU z7$z}u`mo8K1Zk^KIq2Adlevv7b@cC8M3(Fs@=HysPb~#51vOFWEB46GWd7vi4ef)T zC5%*p?KPU6iiyo$%mrSNX!DktF1JsaiDn`~P;PIm2M`<5de9j&R|{!2|HA``-^4}T z_Xao!bhUXiC4GYn3)z@&(IE_D^G4y61cfKwVw9a-C}O4u8A_1owmlp-HnOlCGt?J5 z67{cr`~bryr5;dV{Rexv5t<3gctcwM+_AS_1^8wv*WRs_ji&i?Wg|vK+fz#c!Y@?V zYJ

=v(W~EwQeEI;J-AJa6_pF>tFcdT<^LCzemzOFZ0LbB|{SmM@}TU7+0th!6L- zDFpNy`6UH=Q<~P-E!S;40Sof3ZtvYZjC$XIxOMti>eMIZLdI(PuoX2mIK%O6f81qX zdWi1nQ<~+@t=aGB>$RMmm0wC8BA}0MULE>O^bU1<9~M-3Y<}*C<&jWH4mu7AZrBj>&{$AyQQ{?Jd>)jDz7P(L$r+E5LtYRfY(R$8-(oZdt}+7(8s z6)qph;C;FeRWjN;+&!FG_4xK#%#+l?d|58ovFVE%dEy z&d|jNLg~IFxo4Y6;1c2T(I7#D7E#VvNCw*5yv|$oW38f7CrM;X__G-xu!GxHxJTzx z=5|0M$nEZRwRhp_CPWd4P;j}~B6D`kr66@I-kbcyaOF+!PJ4wh<1L71b(*9`<sJVGF?2xG$JxZH7~q&@13y3>AL8BvIf2bSo6#C6_R@jV?*ynH9D zP*pXts7-TU3<<4Qz2#Lh2R)p1_lTb%8W()vR$sEH=EZ<#z4*fO@W5CwsPFq&(!yK? zPBlStSfr_ZX}+4`Iyu#1`%&m0k!qxKb=STa!A=I8H8!xs;c3-`@=iv^((DCr2d;MW zdw1BnxsqOcK-GlVGxlnmr{K!x1>WmZNRiuRp_j+#;owK&fcYElVL087-J!CWn+o`O z6%vxW zlwCS}Ojv*qZN$pxc%~0xP82)>t1Whi3#1ts1RrF?+`znH~=3SO5Vh1OnJffYY$pi*V$Y zezCGf;NMI%i=vAC{|W=_cr6%jA6T2m@UAwh?}glS03I;ZkPp=Y(p-0b5V{axdBBMZ zGw5Xx#i}I_)P7dxP=%!!c1DG0gEoMTmsAP%##<>(T-SXgRCV$n&omBq`)-#UiaJ~8 zEjBqmNE!R-8YfIo&s02pO@8pCb0*=o><}H-@Zw{(PQ9P6F*ElyoBJ~zpTblaOm|Sq z4=uB6a(3OL?ACkQz_3wC_9#NO zu$H)AN^xOs&aku{8LB$|&Fh`MusELmlAxwk0U7=8eWwh>nlq$lD=x38&_j~{m}_DH z)3}eTD84M%)c$C-B!qnoZ%gnJxbvVCu+SE=u~i@XL=bN%)E;;E!w62tq=$qi^hE zDAU702XbD4t={I1`RToKcf=!{0XLD8OpWEDWIX7_P& z!VlYt=u|LzqT89Cl%uodsL*#b`h%Bj#L_j*QdLbYrzs!L^Kh}Nbl#Km;xwbF-pig8 zCG$Rvho?OBD}~GN!NGjzc_Sy{;tE^kMHvakgygg5SrtXk&JGsUVnC2R%Kpw59Lpl6 zzP>G6w2^ml{sWUv@osKNXAoY4JBXCVM#ijQDK?NX>P3}Lin!J{;h)nm*7x0zz*)9b zft)9E8e`yOnVvI(6w5D+qy1&o}?k>9h?4SfBo{>+Bc=nkY_=_GD1^!>xKS5 zQCC)0zhaE{-?@GQ9L$yo^=+!piM|kPhA(kgA7)mqMQ1`LFL0e3vMJb`d&Ti#%TThGf=JT@h@{V1U!XfS9GAwu@bsxmDA&~tKl%5( zQ~d}sAtxd^g|Mdwlbf=GOBz}t-@G)&!xKvZp2e_-e$!@$;8LRVq=U6gmKl~i_ZH|3 zDy6z=-adZbESu=lqx-$$7op56^Fou~*I3ia`StyUmb-g(2MaZ8^PUIh`7-^Rp!0Ha9G}0&AV!t>W7Pa9{#H4nNLO#ov5BH{FeG?ZopZ zpLS@i%p-$17kM=b_-DU>4eJy#siZ5SqCehaR5Y>{qp4}Hkmua_O22g#P)8!%&nB}b z=_$}e9i+)s9Kh&^pp*B9b3Ehwo#_ow1d+RGuq$DH$I?_n``}=unPZl*+2=&r3}_Eh z6Ot&(%ZsmjI+MJ|*$ej0Vpel|_EID5+vPBVZ#@Pn;l6EcTR={_xY0;Ezek?Bpt9Nn zFPOPnUT5ay9L^~!QaavUaO|gU!_qv<70XF0z_RLU?J%uiFYkCaP9$H>DhK$|_ARpj zNb~r|ZpH80><8s?mfp`F+iDwUUv#gA}1>w^e-cknjbw67)=Zf zi@WuCR!j0r2nR8j?gDZ08vV$@QSjPuT*;yyNY1TNdamA&e)kKWR%>f(bVCDmF2z)i z=$l8LZk@4d7I6i(hjIqaIpDNJq(pX?n|%H#6;0h|kKhD)-vT>p%_U(PYQbA=?3+t> zQkmwaa(C1BGw6NG&zRRuB8DMO4i2r%WmO*i!@)tNEEqN?L7+ZWwIED` zh<(^u0`7})sfMLy7Wi}}B5q|tz6&Y<=TArbm}ff-IUVFU_kT<58-yd?#q;c94-0;9 zzu_Y?Y!x&H`8T&(4a8&d6D1F(2yNr@(e(0J)zYUA?#~U-ZlIUJ3v&gHN4m_CJMi}w z_Cjyota;f3DQPFKaxa}vK@dyXt(t>|pG$>Kl9WC+ni7=V7nQz$$RGguhA+NdN_Bmf zJ9CD3KqzvCka0d0t-WL719rBJo*Me9P}#y0R*jyw%`JykygaEy zn}1AoOk%e9*iTql+>&s$ab3PPB13bQOL;dwc71-}jwY#eAV?lZDIrtBH^9rW zO|MqZ=XBx!1W3PTrV--;OlC7nhaSko6$SFsW#pY0y%4++PUY+r!tHryl}qqk&@g1@d2+^_Ept zV#Z#UVQg}p3%hibu+VP*fz=jmwl^CD4Q8mxkWd21i)Z$2od=&cXPv+CAhY3yFj>S2 zk51&XKi zW94JF-FQd2L++8_6aMw*$IPFKR%QK51a8myvuSg~b|Z(n&Y#S;JY(bV5BO?e$ zbJxFBV@@pZ%@7gwOtoB{+2dIC{)u=S&Uz`7Du837e8xd5~*0 z(Zk~8B477l@XFzg|8@n($|Le?ii{G|^zq2USI7g;`vPRwau3D|SxRwE`mj-yC&ObB z0_z?0)9SgOgh!Wn^k)2jRR!IjN8MZTiJO~*KOC$yr_3|f{J0GT$aE8^wA({J>3nMc zkWr^5bGZ!xP}~>LdUIbu;PPuq63+<`dfYPZ{(SPEGi_q~k#keZJM150{zUrW@YeBx zAeL^^MAieL3<74o(Vj^&Z}ML%OS-j3eh-Pksl`t&oO(DM$#3>c`9x3rzCd^)>U>^f zcPL0)GyIoA7;|0s5K85b=_nO@fcRr%1s94xedLl%$KKfKQOGD2UG-;A@WW-xq@3&; zf?Sc}6Jk9VX)~(j&5t<+d`vyNH(eqy-N4)1`@PnkMcJ+ z&%!4Yx)t`*mkgYZRrbIV&=QWEAlGkv?u;fYR{uJRp(yoA4*B!q1%bFE~Sj84KM$qq`NG-YGq$ zP_-r0Oe}EHB%*ls?6tF8td@z6)bvX%Im3o{^K+caKK5FL*c&NS%$eU`FF`qSIo|X>C ztfWYp|ITXT6n}7UfA$TPxAbm?6Fx zYr_3S41QF}E0HfFD;4g~CNCb+8(4o<`F1t-lrGTUnxeIXLiv=WJiD8|uf`6K$BL;_ z7^KSdyKS#jtxryuYZ@t(IPA-K!1>`rWME&aWy1*ND;fiwK>-AbBuoB$pwrm-1Rj;*jm{hy#1jyymo&C(Id;h!3RWwlW>BexK5m{BN 
zc%+7xRpfj(wIAZCdjhbK)!J;w30_j@wDip5ls#QQFMUOQ<(Tgih^Grer2R4WN3oM8 zsfLNiO?*E8ZvowbswfdXrn(H2lr!=3-n)S|&LpJ)FD8p?*vY3GH-WeR0k_|+d%U0b zd+k_^ALBisP;H?F06H-s#L?P-g|gq}@neVO#<0Oe{8cUCr)dk-Yv*gv#k`*X&_%xc zbouKMvZDJ!C#YobjZD773t+AJ2r|ujx8aFDZ#W2{2_vW#7stsk(KximFE|eH>bp{f znaufGUX9g%9MC!}pk#+6DoQIbJ8m)iTW)S*SxL*YkUm!_A~ugben@5#I-kq=X|`$h zUZXyU=4LG?Zl&33(Dm2bY3>F2oS;UoxJ^m);@DS>fw*s9<#Gx0fxEq8MNWwfY*HbH zH{1(b_P^KCe53RT6V08R-WE}E!m5gl7Z<^&*N*^F5ZeK)6|bk|iX>hH>R;a!0v{J# z09aRI-#IH6Gub-$ZnbPDiHIQ8@(UyNLv1g_X24A;HcN}I3wmsnR~|~KQMFGtsga?p zzRUUDK{`+RJMtf^P(fFoSme}w@DWmWH_ovj4~;-jq9=l&oGHpvHA{Xny=84z&E{mS zl{|?xHhRRaia^Co<@q&ai>EFI(plp^j5qjjapE?TVPlxu-Y-*K9;>Z0NO~8^wEV5g zSDIWj@vQj!O%RqgpPSVvvDK^YkLBRbJ$66hH;`G@y&pimPm;W@6l$~2c-?Uvb?$8` z5Fh<8JBE)^%&RB?>^7da{s`IC3)f6I|Gf*QnH!i}5#-kn8{%w47{~AjRV5Y47|9d8 z%TcatmNQ~SVE(WLEmqb$T*R)E&x~-6pG(^JGM>Db?~&HjTdaI@qB=tj^lK*3by7dy z%yqLGYFfnvt(T$#5tJk7orcdUGi$1fK$BGZ!(0#Q&FD7%|$z(?nu=-SJFw;1SResJi z+$?*Cj7kR=$siSG7!LP<-EXyOfBuOfimk&<z)Bk0-@fW^=pyLSARM_?^LZDqikg85san<8`pWA>dL>st7kb$ zj0pxUn@sW1uy~$|bQRsan?AMCMeW?Rur*h{*_l!@yerp55fx!(@fJ5QATBjYSOr9f zw*LsCBb)J+o(-$L#qvB(KPEkJue_Ze;dXnLYKc`z%NV-KRo3${(e+zmJt4GP2UBhF zTVe#PX1z=AfQ1|vFYd-g)=GY}JAchSC?wN0a5Eh|KOdVG-}1ENz*EE}9)=GWaH|a` z??g(s4(}c@n6}7ObE^%IL$FNhqQ(g!p=ps@prCU`IZu?bxT$#`xEl9`%Nq>_`~82W z(R$KHjJ1)&)PwypIUd1%2P!jsfc+HY{?d2%3}$c0;I7s4i(h4335-3DAEZMjeIIv4 zGcE7|maRdzdk-AIcKcRsm>R`dA zHgv)A#@2^rWHGbBW7gMVy^P(PQ*J|cJ`+g`?Ou#`2ePNAcX=?u@;PI=6Me;W8!o_cg@nAT#ApVF{CaK)rZF2-K(9JmuHKO>(3$yHm!!q@4G z+6-K*vE~w(xi*YMdJat+xx{dZ%l8pw_W82GEqBRg#*#|+eT*c#D{HVKdT_h|CfQaM zYl|SLs$%e-GuX%cb6%Lta~W1i)uw4kMfEabE5qr4!>!X#ubhIB!k9e|%E8nsUO$Of zJs>JxBuCc}C65-x?IHkoxZBFX!pVj*@YqmreI>zn5`6jYU?K1;74Fn^y1XtmuR!SN zme_fNu0Zd8OoDH9cfRvotC(4EYdgWMCNP8OFRB`W^|zl_(3Ezb?Cv-=PQWxbcZa^F zTm(H@Kaui1rE58O1x$;B!S0)MrGBLr&y?S>y!)CeA4^dsKEobHONiZih2%t90Zy(8?Bx&*jO#j4xL$uwIXU7z5#m!1? zd}y8P%ziZ~2pOFu4d!4I_Rg;Vq5l=j4fU6wJV|`QuS1n0C<*h01rj`RSGwLk{uDn` zKY9So8lkLN^YEvGFzXbsDacosK3_?+RgSKm&aM!ojf#rOMhMPi%+vV~Sic{XJT2s3 z*gVTQLd!BWkA!->(RUd}%7H8>DiQusdfpl93nj|AHMkk>J^hFg(uL-Mb&P*wqUGn$ z_=eKKp6>39$WG5Kbd4U|SDjM$Q-;dO&*prO#fE(sG&yy?F6k0Nmb6Xe zyhG2xLUDlca(*emKxF@NvhJL#Axr3M5AJEdIk}wUa?gD!+Ro z+*rg_lAiwdwSmM-yVKyY$qjh}FWgH(ERhs51>1kCi@W6M5h>S{>>sAZqk{$}dS!WW zhr+}?Rk4_UdYc}h+>*E`m8g=Apa4OJPu|oLT}vTjK7HsY8^3H|DGr_5}ts>M&)P8yvr zp2qG~LN_QdIc;AFgnAo3Rg&}%q0RE36Ws6#2Ib3T--uKA~f-~JO9>oG^*KM{+HP-YBs_;qv_j_)|8iW#fbo=QF)em!$N zG;pu$W*A$>Tm96VHI8PTkQ{1=Y6Q|8zV)^rXu{n2I1)lypUsVYcw#;P!`cmUcV2QK z*5rm`8+d@zOa)%rV7Jboiw1~d0*<=FZK*U9VphuM2~F=5jM$?&-u5A zHyE%{5nyLMfn&$=iC@;QUyLuUo`*bV=!i{P(l`;6;dnMm;DW4N z0|LRl@)~Uk9@AGEu&DehzdbcbDdbPcRoBSYq3Y`?w0s(mUj=Mp`(z&W)yd*DmM^=I+d?IOAkH&(WoQue(c)zX_iZe}7&trw2X)FRLfj{_R3gWQ7^D9gH1~tn%Ucl7Z z2;4Y8F3I63QaQ?DF1a@+V@X63TKETrIK-U4{SldOa?t^Z1?^sFG1%daQDX0w35=0} zX#goQ9^NWrWw{Icj(SbxlYF`|Cd2x`K57s{j@=D4%{qbsb0VGf)ut@? 
z02IBRK(lVFQdtDp z)jCE&04KeSpyz7i;AY|RfL58|Kd*n??U@u=19wcqms*7)``#)|a^l&T18cDmpm3n) z*FQGSqEgjNUihEx8I4NL=3}r5u>jt_xUSVM#hM|D3^qQW-A(_uXD0;`FJb=2(hjsS zc1j>no{`e0U9*b*$t|<=^0B8CK&u9KHEl)AbPkCI-je<2p!5n3J)4B%h1_Ss@^m^^ z|7o#fMWi8DYS=ZG9ngWP2P9s}jBk!9XUa3aU^z+m5P%9C`U03GqpVv)M znE_H66Pdg9qz5L~1Kw#!(*!48Yx?*|ME*H_1zhsyd@y_d9t5YO73p;;S;yF7*+L(m zPh|AIGou3jB*&X?HkUZgG{&ZhmfK%lmM_RJcSM!q=jlTZVw3!RW+RzI~BF%j5 ziKJS5BZSLM4bk&;j=x)c$%lr9R&(eFPAp6n!#Q_^cGL?iy!q3GN1U`1R<2Kl zAVtZs&bNp(TM@G%CmNVwnKt^kgOAfM1t=~)I8+Af(;8E7%wmVx%i!7T&n?KO%YADI6;L*w}URDnP2>0ZJT5?tJD7maLc=^0^4evXcm>m#Jc z#*3u%uf%)KASdN#ULdll;`8nui3t-#EjVVu(noUQ&WmlP+>V`5konZNH8xSILfGTzDn3U zsg&A?6w*SUVElJ{fqqAtryKif3UE4WoD9v@tEN~c|;?^?=A0!zQ~)UGy4T*dCsJ>M8#9-W`y=rxGW-}nHqbYXHy7d#ZM z4T}S?Q$4XTv!=QUluiT+0>gJw4RVv-biH!H zT$_HTNgaMh=NkgY^=Ph2&WQWg6@P|a4>p3C5`h1Ur?(Dk>iy%!RX~(fL1|Es?nan| zAl)g_(lK&$gMj2vuli=XrkraE)v1oLy)4ZeH(K9e|QHzCBV} zhzc!)TgjAK4}Djk(bVp_Q}*2g0-#tuatLulsw7UE&)9e{=R(^>u2W&8ZCqD`A20aV zuXbe~_PC+vJ+%d!4kUA`E~gK@fQeg5G&D3cc57=+3?PT!QIL~=07Vtvs`&P(^wtwQ zN%Ub>%zK@;a6;;GuDk6FH$Udl~ozvxq~=3=ALZFw;+|H>5Sgl z*wZlKsU5I}#*6RN#@l3s;u=Rj)XrDfl%#UYp$;>2mJvBlPI7P8Cn2lRoQcXQslN=G zDALoUg8HVyZzB@8oe@eiq+%vcY05UPLJs!!r~|P#Uc_^xt-X2-4PI-6y8*j(%j}=4 z)p{$jUA?l3*Y%J(%^sS-*JB86FOlg#FW}Z`6?O9~2l2S6bZwi-L==2Sg+RdQ2mVS9 z1R6oTVFhyxuFd%{Q&=}mdO8OH3qBUMYx3I50qa$PT$E_=%r0{LtHwUG4~dL%`|gDR za9h~9v*ABl>t4~Xug}Fq>F8OMkq(qTjaAy>a?M_kLR=ufu*&p|vg#K)hp=qzklG@` z8G@GXZDA-db@aX28zYmLt?`ynuylnn^)Y;JSNmO$@zogFJ*??vU;_F2dxzxpNu7wj z0I}<#IxjMr6*GjKNYK3qlf}Ev<&)Zkr-_NUtZ=?fUoRgsjKfL=y;_#S&M>)B5=lQbH=6b`jdA|_tcz5Azbq3qh#j&P9)S$v zJRJX`wkpGxP6Ygv%`YcpC1|J2yz8}Eeqg0~`m#1dwyPQ(`mmTwd~ZvwrW?b(7F6@- zgRZRg2NhU@)3a(Ji3g=uNE13TPwGrOYd}@c=DW%Ir6N~b@mKfdifYZF%%?_kL~`kR zeQR=V#}|j5=Zdi|Dl-N7?@ZVE-T-8k;ij?^Ryd3Qy-J`%x_nw~gmOTwHBn$KJ0}vX zS6oVKsVH@tj0a^`4Lq-Lm63P$1^)6#xP(sC1%UQ0tpkSEXgtYDs>iXpa2)-) zY?21A4NdbZw}bbr8Ny`@iM~%d?e{}c-`bH);KWNq5EOQ)AavJ;jV}UgF_o|Djk?by zsf2m|>>&l|_t^xYPOQ}RgFbErB%fzAx6ew zFJ5u#n=t<=C~=WaTR0T26Oduc?Kw_7KqdAxcDea>cCfo+{I>bLw=f+K9Uxo7 zb?kNsoh^h|gly^^si&(yc9%=@oA&?wtYdj`hxj^eIK~gJU9=}$=%WCfalKN&IEgH3 zHM{}AA+dj;d1qRaoBch-JuZ0s?nOC#D3+PgazBPWsHO)+l6mC_y2L0*7UZZ(&w@Qp zRs$vhX&8r}g5=hSol+}yn=#*EKhGvnK5b(b8|Kt(?&OQvsfnxhYq`_Tle{<58*Gk^ zsS$s2-K#QDfYX$Y$QsOx0%)-ff!$PV*Kombesl|`5B0(&6uQk~Ul$jN3 z6F3?(S{6EBWekpQuDIA{#lnErH|57dPO|u_=n{ik5`|a;p;O# z*Z2Aw!$tL=@+Cd*RuuCnZe-HsCtC~@6OT4|K+FQ#P2yqx9pQZGR{^g)Vpp=)`dEaZ zU0u{|L6R-n4!O;@_g+k8#rkImY%6Q9=4p8H+$#yngw$k}o+V*GF_v;5NHXNKmB?eR z`yv>cLd=TP`qeIMwU^}kg%io&GRc@fR0{?WdRE0a8J_AlUYo_%$;n*mrN;7rlN0t4 ziqjiTJ(3!19Lblj3T-3ala~?j*3>XhvCOU_u(h?c#UxDgT;S1tTTexnmiav({~}-I z{+p0!``5X%XNr1CzYh+y=r8a#z~GF}ca)`QeK4b?*bRCyutA`DnHsR;0O~S6SDL8N zeE9I6J{Xmu93V1hHg0fG8rnC0`te5$#KPn*ew%m(RFMQcZxsoNYav?Wv2#K+nscOd z4+}4UbW%-u`y;zdvG)uda8AUKlIrU2-taE3BI?#p^;z#!_tVYt&ic$uG>16#5RZk0 z1uPC+^@-lnXlqk2PA26dlC0xYfSZOu28|Pv4Q^H) zu8mwEm`rSAgQRGzaDmLn_iTwLI8qKeC2fc=7*bFmPKlU{zSQML?Pfw$?zg3x^c=nb zZ+VQ(;`cQE{@`9P~iG1mPptIJ|=PU-_gZjvG(4 zt116}v#yIsDC8^~<6up6q!88EvOmcHWh*%@wpDNIVhK8%Hu&0_J5mZXng^X^;TtO! 
z|6XYLZNxPa7K&-LX)*V^H4tAoWzf_!r2}=yU00u_py2>jI zjNJ!1r-0T2qZAGxuI$(3HwV_?V)Gt6_=-tq9r7A>I9Fq>SsH#}nEHdadB1|C_rf%J zm=Ch$8PsK)voSmK!7BPBPP>Y?q0@meddo#bz;_6{uX6n$+0Wz;zP6QR;(d5BFOapN ziuZONnog&n23$-1g||QHJ^-xX%$!Cj5>ATGtqohvJ}%hp|9~K)aN~*mGRC&{aVVLx zW9F@`Q&6KR;EvlbHlMeRWRD*JuQu*FM&?O;Ty5b^r5VD6uv6O0=aAw^bKBR-40~*)An}FY}ogI`tv` zxK~5Mos)OU_u*~0kZ#M!B5;^sR#Lz%!L1Y+NKq;4W$2(XOxwlCsz0dZN2E|r?Vy)= zOIcsvkaHDlJZhV5N5`1%Sg{8+_t;G%(QUyo^|ornCp`Fde|ouUxDdcGG^lKTn zKd=hbv|&g5v>#Da2`(uip+6WpEx2N6(rsyG0VZ1MqZx$?p{*va-G|4kTYkXAK0H~P zr^>$97c{$BanS=1WQ4hJ91e|lrZo+UCYo^gYLF&8jgSn{wDRF?KpKeZ&Wh#t3jEvt zK@l*O>)CTl#kw3dqJ;j^oRif;fcq*gkdN-toNIhuU7@Ti%qR5a`>7SM$_iTD{W?gg zG`w$8Aw6?SC(IN^1ZELfB7zv!KKV52I^k3pZUgG`i>zcN)5Mgf3F?Me^4NV~i$oVN z+hKU$H`@F7Mu=5witc1vHQMRsDEgS7HuI zYf6CHR*{Ug_?+}aAR@L#tY?x?5*d_OzwDJrJM~SDGp0iOaD^JhbWHcj3Q8urgR~>> zWi>=naSiv@gJrvQiwEVW+q@?aBPFgM}Kq8;yT2>8~ zIpueT0({#nvsR1O*@u%r=f1&rOKe{JrMzCpXv8zzibsxaa@rCX9UKSq?2w11VtI;R zOHyZ`2q@NT6mxF`?%TiY2kuWYYv%uQoC>-|7OC`gy}jxVCy8F zhn%P?Q=0TMwC}8zv;e=^t=FFmyk_(tWts7v*KPB1& zdNXpYXRn2KG~_Vq??0a=Rd>yqfoTU|RwUx3eRm}4Kj)7yIf~-kI1*C4nQQeXIWo%x0Ci)Ps?%nJg!u)?FXjwNt(zTP;YZ;3V zeJ%C8U~Z^eBgmO#Py~AvQTS}tD-4O1qqX`AlrMS=ruWz#D z_YjroK}uqG^j&+u&{+T4klw)l)YnW*|jhX$5|w=5KgZ=f)A} zn+%D&F->r-IxEkx%a`eNjG+tSCSPiF4XwUljy&y5nJTj}aJyt^{*oRlSw(WN_^Q-q zzKWD8LT?Vl6+UD$<3g(KhH5%9>$N#re5O6CrBk^}?P%>_NwSpl{?`gp4RByB88~`k zT1wr@)RiRH40O@g00zoyy#xcXItKIbZu6xwi<$Zg_pPpa50yZ;UDm7VCqMtmq1+K=`tnHSBi+2OtAZ|5n#yhf{?k)NGcS1xV^>xppe7BU<>OJXbBhVtS1GU7+p7?7Q7$W%l?v|DhHs3FoJLrIgEfL>9}(z)GR8aW(4G zfsrTK*!+0@W&qv;-Aa1oqjf5cwlHs{F*lsa?JzPwU5rCMS5)>u9a zYgp)_t2;-d9LXn?xv|$DTz~kyhJJ^48=MSeXlU2a3x+J_41p2+hrNyjmif(u-)!+X znTVZX4@@Shy;gPHr!t-qwWbtHNKY~76iIxJgO$%O^!)xXKTueBcwSE>=%x9_`ueK{1(x(D72312vsgZ8Z%@w~ zRc#K}mk%(qHM$)_#G0sw)5+5Bir5;Xwv>Y3MAS>G+J~mnpvrxck!>q|Vt`=B%rp?& zMb-WHAMPK`InGo&^-%necH=`&7#&rn9*p?=pG|a(jgg5-b#?#G>FFuZi_{0`<@AG= zpXGBRby`Nq0YMx{#gAsr7rCa<4b9uGrBiJ)Q2v^K?sgV__A)&;hs<5k=Zfaf&SchOpgeD^4zq5QermnF)d z$>EX%#xk3N&WA)?b;&sgNFFLMk*kjJ!65(4MK>0V@;gkHm2T)UVJNoHuRxcX&wz&7tuXLN0;DkF)FVN~nqb%jJuHuc{cw zD6d3cOs#jmdjGwW2#h2ke2|Dzyh&Nah%d`z=54_`(YK)>-Jr7!^%bsfW*0@agYSSy zall#bSuWC67k8=Cbxf7dRFIyv!3*i|GE_PPW=b;JDsL^Y=Dq5X#rdf4Xk7Q7mpbgJ zQg{Ctm-DR0kR0&*`J+!Zo*hU1z*w)~E^u4bd2qQl^ z+VY74lMK!M3+yj@Mh6zlDI@AD*|Ebr)E6NrvC?T5Tf*m;2P85QJrO4tMH((A_e2cZ z<6Ado~; z)M5v61xhDmfK)cU1czBH=n9Yx%!#)+pDJ`?gsj|7*9-ng%S{d}G2@Ykty4hdN#1I5G^@+igZp*_5)@Xqw`{_Roy?$`Yf7Rls$>B57>WzBnwCTMl%E zA80La_4lK2)#x>O`D@kS3RcosY8-Ob;v3gyT-84&Od%3Q5_J{RIHR?9lve)RzylT> zIyTybCyYTU$IFXmLDWkWd#wXr1eqe~_HT^6i-FL{*fVwPb`O?oC5)L1qN_P;@kPb# z7O%v>7VqeYp_TzY<;PR2506(*mjuP71*S>i4#7T&6VG}gIm(Hu*AtUp$M6{YiDzP^ zHzx{vy5k&TRr|a>xEm9c@!}^DD{FWqbbpQH(?PNVK0~10$r_?(LfM25ey;XYOAD9~ zfJ!DU=og1{n`Po8L%_vQsSk?a6Bd`#Y!AUO8f zh@cg%N;CKL?+io5i(}X7tjLPH)hUAujvGp-HArw;#32*)etdb|WE|FeM@a6Fk(Ku| zSb|17AzkHcXT}pJG&Sn*%%AHJ(g%cMMkYy9%uw^+31AYZ0}fbcm6_h{e3EIYo%I`a zsWn9)(=TO~vI{#A&*BfPECX7W)K2Gz6y6bopqvgCk77DRCSh(FTDohr^N_i%zrw_E#x3{74S7!%AQE6|PzE3@7t_szq<=d4}}B4;QA>`OZ##4o)@|PHyH?i6=SXsksazudb<;jdk{(53R8ZzZ(=$ z?ADM-c?n~%)4UC75%ZXv1YO%Ed#Q3RzMF4y)}v;@Qb}!isaGfoDUmKF*~^zk^fhB_ z^;Jcn!-m3vklIOZ2e5|1%{(95K7xr;!r`mMKb&RdfW-cfys7;yh_xWFB9yaa>K_-G zuC|6)f7x8Ub)q=XzgiJWqc}QhU)X(k22^~FR~xTgF8iJ&$iDsFlzno28uZlCbQMyv zH+}dNef3fJ&1Yf1&$zCy{7^-1W&F-DQ0XOYUOh@#dtEKAZ|W}XGHAhz9f07HWF!lL#l~`?>{>8FPv(m(*1B75BAuwV!=%f9!7M#XpT^f1R{`!D9nMu_bh#G>v(En52$vu+ZQodQ!8$Vp z;ItAW6nNNDnd9+m0taOIfdX6NHVbX}+{X@y zqnXYF^BbfsdDHWlGu8j+1^Dkod6`w`m^L7{wUIIq%_RZ6N#_t>31NW$Q27Hm=32Y^ z1tWB5ous8IA8bj0d}jTPaRwe?e{~Haf)3R--)ekH@2(`nh%;SU_4Xi+9a<~+o-Tp1 
zPz`?2x0{>YIIE2T@6n}8Ou;V-qMxUC1F?q&=4+-3aMSXc3$2);hH2Z@NhDuQ36%%= z4E&H$>)17HiM=)FS57xxbFCm04&FOUMQ)Yn@+usAkGJPPt$EBfmyH+5CImEmRC+@< z_3^>=*=y*jtHN-DPW@KNEwYOmS@WHA1*mTN7lr>7Ha>hU4I85}-j z_#<%@MM^b;mpq*aDiJ{%ebiVE?#j9@ECVs5cUL!no0QYL^S#!>yElUA5Nkcbxxd-n zmS&s+9{!y0c@<7b(k_Y+LH&(6gw$A_fq2s5as=gFt5v#Eqwy9jzUZ;+{QK+fSA+1iRfQ=xm%Hi+_GFr1ERT)n%O;Fui%s>ENI|5lt^-R}1%% zjgNz8@qT#0Z%RKKfF3XP?uBl4d6#Zg!z9+*-@g&%s%EtHdH)VSj`>r3>3hPLQt{f9 z8l}R_FHp20ng1{g=ZwzRJ&6RxQCBL%UZ7f2R@X`)6c#i%O-+V|PTO6l#0>pWZi=;+ zILmB58%`XW8OI+Hc!XlChrs&ime^?2Sn5IxZQN^4HfGS?SJ-JK-7mCwv!&=h)kgJ}o8L>y;Is?IFwb3PG-0W`@61^8bqUjx1bafG@!BtHS z6HH|VMKNTqd( z*!+kh^TI%N1+94~239~#7;T%Z(921z@i!!2X$7>9oYC1$Oj@(QG&AOP)2QkMcwVuo zccP=F4{#f|J{Yg#ON+|njdMHxw)b?%TPKL^-Ah(6RxoJVN0wn5BNrE=sFpkhG%xXa zew)oVH2jt4Ii!cx)a=7XlX&P0EpRqg3qSPI6~oq7o`IoEWq| zH9s7ql04sLw*rEcTJYwFK#2N!pi7pE8oagaqO6HYw4(!YJN-;ppP9C#$L0P~Ku`Gv zKC25+c}a!c(BA3f<p#+T} zAnFHy=XN=t`R*AP;p(gnB+d@WpkIY_^Sjr z8vYi%I+T`C8iHZKcTo)S`yMHiJgvpD}xlYy9@T78ZfXZZK^jAnR!3nTPiF*bI@NF%j)fr%E9K(RoeDvK1zd%)$>TMz;Xw+i*Pbr{AX8L z{cwD|;Jj3gbU`WO-$UkP z@z5mmG%^z=oE#~7BO)~|3rZTkBjmaJ)^7%X_Zv`XZb)(1<$m==JXl%A^O*FyA9?FP zXbLN?JlV*|(rGJxMaS(&CLX>pc<}}o4dR_EWkS5*`%BDFuhw?)^1^-%;pm7F&Y!V4 zP4?-9mMMXxJlOKz*xxb}tB=(-HWNIS1 ze}gT5%l_}!FXko*KeoyG`wzbnN#apY*x8L0#rs+d250TXn`BmU6li>qgP63_5yOJX zu@cxvPSl1OKhyA!c+&lxJJWO>nVUShxRL#pTU54oN6ee&lauI_{Y%&uNtXc5!rVl9+s@PFr=9QvlibKoE?F58JU3cn zbo>Tr*bN<-ba<^b<+EWuRH(=%m|}vL-WJ4EV6CJFwh%JrpXE;*G*z}mqj@CXcKiM{ zEq9NTaL;V-+THJBBEBR_sF;T9=Vq%OBk1&3d`F9yJkgo)One zqXBY**5Dxm|Ps@+mEL7gR+erMmq@}m&@X;(?BH+aA*nIrI z1I zRIrt`V$9vn$2dxC(mybr{JKG8p%j{0jBJ&|hgQ#A$kV$M#7zOYkx%64iQ~e{y|$l*e+i2G&p9xB<$tB0s0S`m9H#-tW&BY$lG@H)zRp55Eo8A}Xx*5z7LDq&(N1>O1(MN9tS z5B`XBfe^hWgt~2v++&ZJ;+KuPh|Sc8$!ImGS9+9?K>q`aWX7v7%s~X47%E{Sg+I6k zgnu*cKdr@5r=#Nz1@n?W9lg8%MF=fWUpvvZ83_sNUPfjGyYJ)Iy&TDxnrcU8EufRt zFZQx~3L&y&C*Hz3N^IDwedcQut28gpW5Su z(^~)4P;2{SzAzcQ#4n`cv%`I55+Rgg-XCK*wf`J^9(uNQ@SJlBa1moABr5RjUTC)V z4+H%G*R>kM46hRC^8tbVDlInStw8Np#@*62Gaw6<=D|c3i`rim=yDdLpb~D$rf+g$ zwfC@VwfAqhT&-jX4j-ALHIz3C0)RK1R)-O46)iTR!x-H-9-9Il_()MLw>pc^7noLi zsyuczBI&MXZc_IocN7kjGGN}YvV<#Jy{?0ZMEuJ<@cR>@{6Ps46D-35X~hb`O;5fC zLDq9WZ?U9+$rRQ&1*4>9Vt1J|@I1GBn1d&3hg1P59(LO0Sm$u8d+d78Gzf?Hw%Q{# zT_LF>m;gkhekmOBrYeWibdV2Ssd)}f$#Lh@ct$VYgf#nvSzt+nDF}z6+_UKx%s<=@ z!E4rG(K9%&wtN%LKS%7jH%sp)pABcu*wPk+cDhw}acr#kepve`7pdgn*njDwb~t)9 z7*Zr@Wo;94>1jMFpb{O4jx*64k>Z@~?ydGB{O%5SQrd(`6Rp_Cl+XLhnr_MaSorq# z4o%3xqj!z79#-pHX*$F(qwp8#5A6^Yt!QzoNjYMTa`5cR@W^OUy_JC%3A5eGe|vDh z`fYZEf{u#>dL=}`StT(ev=v3}YO0XFY4g`6emWt<5}!+|)cCJbqc4NUGUm&#idCHs zGI!mwdsPpa?EW zuwHWvJUe+Oh$P$VqXMEEF;R?$K;qg9-^VRIG;XKEy*=-2v@AmJeci~GGw_d-or-@WN5A%S~T6iJLRIceW z0mDo?)}gLfe^Yba%?J)0h~oR+_QXzRaI(#Dp9Ju5_GBu`LcD10yR+NAn?_cSlxf#x zzN9B1Gxiw)M>ERROl)0!(tzJOTQ9{G!+!3}du#Gvrs8h(Vvh3Isq^G~kXhPS+exaA zj1;&`jds4mmmWIbKqbC%mQ9PlA0>9ZZ@W7^{$zj2B;`lr`m(FDLY99ib5Z#m32}~ zOQ8GWH~9_grqyPS*b^|FYGA!wN*ZXhn9Ij%@Z6W-j~{T1h9)a-lnk&+xj*s{{W*X= zj;3aSRGYOpc)S#V61Z4-aaMHB=jz7B#uglZq|FzwK6|hcgHd7YW_rxF{-u{y*b8$t^rt7Y}PGJD9#B; zzxcUwBpNmz*;GSV7$k8-B8{U zcC5BwZ5A?baoawti$r!~6O*4i{eEpCRTC0)t-+9y0-M{1+bcg5^FrKIw`J9KMo;f! zA;>W*7y_71osteS1n-T}Ak5TZML?4j)sh)!U{LtrkSHD^g~&B%*e>kjq+XA!5x+kq*;22dWXn$sVc{c9oC+ov|qTO1~2+&w! 
z1uS(hr=(<|hc~Uakbp1RLjyEKXGp23sZNPUg~GuzPJvwOco?0SEPiRP<(ZaVw_{9{ z&s9|h()oE7ol{%e!!T7$({TG;wMAqpvkd(3!t-F4w+HWbP+bpuwZXZ!c_ibF4Of{s z`km3#AQ|E|X{kKH-T2h?71-T5G%Vd(DQc|EU$8c&!4 z2t)O+0SrerA^u?o|Ja?=6$wmRo7|;tmI%u(vd;IAdggMAUTYXLdj|hBtOV}p4_>=% zvkFbw?~ey|-swWjx!D|MDh{t*-S!L&L)t^>yG|nde2S{k2AUxmC*yejDqm4jzr=-% z!H0Mw{hsEM0US(tBy+jTT1eAha<}f-OeZ%hM-3y8`jts&#Obg7GRE(N9A!rMx$6Ym$guj>E$ z{FP{e#1AV`1RO{!L9ouN;io+q{)kG>2Of1c?K$V3R7)-5ddzPj?_b7iuRe9?Tg&vM zhRXkql_`yEh*~k2TT^qjQ>&CsaiybcQ53ARU0;cqw@Bo36p~$AjCOvXMx?*G`#TBm zY?o8o_S2Hu8<8*6)q_XQU|eN(3?#Z}JGHaYMQXSfH6@3?)$BVF-JUq+a%0bIY8i5J zGc|v5l=MNYQS_vW-^@%}s|^aN{Kiy8REvWtUo3MfPdDw+>pbrwu;PzmB+&8Zu597o zOL5}++War0lZU7N$9$Y)Vu7^$$a54%OiR~iFLQNcv?b~+Yn$Dx%@{Egc69mDY->kf z<@hW7@^(#Xd}=eaJj==~8R=qi#tjnOWYtsV)v-F7@qo_V^3djAAVpF_=HgpruVH@LgWOO#V{|qK$ZdG zm0U@_oyGWEQ%p+&J5vdXRc{$7u35j$lc6@CDuFQ!DdP2*I*c)ui=a>a^`FfwtQRoAKOuBBO%z0C3$))9m% z0bi4S;_QqDnz8LRx;ouZK>&>cSTLr!+VQ6i=qF!?ET1${ow|>r`p}$97zd6H@*Ug- zz^QycPMp3xR-$!GrZIogC>7xd8I&ET)==`#<|PN)m^k*aH?l2T1;77b)E9S5uZmcc zr5yaIWjm(#Y;9!1Ci0HX^B_spb*-FgeR8ZrPc{ie!pk|0ODp7Nve}cA`(>-EPn1$l9606Dw+eO03_l zZMQC)`P$ze6%p1RVOCn<=W7Mht%y2P<$){7h%B;-qxEePb8dH8-(j zpHU(<8Uk@67?%TD1L*24-QQkgP07ZK%va0#Da7@)@gpT(N@!?!{{D*8_++8Qk8C5U zXG|id_*=bwwvAEs0;7B5E?~Y6o|Dnw(eld87R>^<_`=0>{Lk50>Vyxq5AjcXX_bGB z7)3k^cu53C^h+r_#fIULI^4^DBmd$g(Wa7+i5r`~UM>LB(FTuo6@sK$vOGGb;ikzW zzzjIITVnEs1En#ZnO%}n3l4d_NBc6LLXT^rCHl66-vR<>S7}?B-o=9~pPHl6{8>iP z`t^O`J)P6uB$$X{uHc2+YSvGpeBWpM%dHCE^%`!`e2q$d2yX-WXgQ=QvbMCJ2M+5T zQXVr~RFH4rnXf%q#PM=XNSj+0syc3cUY~6Sxmw;iXJ=9|0*uUX&Ax|b;4cB=3Kr4J ziQJ%5RnI_t>Jk+;WmNi1XaB@!rGbIsGXm_NXyt_kJ|KG-Z5?_73IrjL*K2H3ZKe*M zr{nY+K|j-*b)?N5DDeOIn2$?b>VtOKDHh~R z2-k}u$u&&cru#)e=Ig%$ zza@-UQ&SV?Ukb7d=HQr^g8ZAag2WKH`0((HzqpWVN#Ejh@k2K&!i5t4UUSV75;a9+ z3Uxni)wzk|0ps_<>Q&kIwJ52eV4HiBWnn@9=+aR8pe0{Rdu|iP!*6?-qu&8*GDKuh z-1JTL$cg|W0{|v6FTaeP6+q{KY$wa;5M3Cj?cj$~%`cw2F;+{d8DYZm3X=UQr?+uj z1v{zS!ygWx4(EE6X95iPjREtqsC{6HknPJ~Z>O$>yrsW6VL#8&tARPKgt7z?0kL>_0y*SYUO<<0Z zS6Az1bdq!o&`GDfQo7|Y)YHFDobA5*MnUDw!+)F5^^73Zt7-Dx+@xXN&+);G3zu)U zCp+xDWgVyxl|^6;J;nfjfZzR{>0VK2-*}4+A*JS0k2ctc5}V`amZ0i_FoUH z^TioU<*9LN%d}Zvy8|($^6Cjs${+aa=?8uBHlRRwNyF!D+^AY`s^n2Z$RgZnCFJW7 z*b*+HUI7jt*_cK6kW=HQcfqUWZ0WA%CWGfS_vR*RiE3-Yg`!rGXmsftR$3^24a}9Y zH+QJd?dmpNsk*nQ^NQJ{?0jMQ;4RiwIJfxgqMyK>sXC;8GcVM8YUNruY1K4AtBfVA zwKz}8snrG9*ubyz$*|0{R6Zj!|r=Nvo^uV(z2 z9N6?{Zt_1!2wT0Cbv-|hx@cl6Xx(UyGr3ggty}!YD8FuI9J2h*K!z}gB}Ia!PgdS4 zxE|CISE}8_kfm0DROAv-OxRDVe+_M4_(%49t2}Y&N9e;DDGU@n2valqKR2E=YnQZkU3P@L{BwE z0!vcs*E2{->%Z?cWp45BnQjzi;>Kt>h__C1XNha7p;EDZwT(~9&5#eD3qHOa`xrj7 z-ZSiTaAM)u$DJra(GVsCoS3|WXe=wk4ZvUF5|;5*cpz11(C&%i*Wd@k5&)j9v7e-R zexR-$S=KO?DMo%@`A=pif%4pFTG#2ho-;y~q8*d1Zek@cO?_m8lX~mnBKOFwfbt^k zeT4+OvVSgXcCl!~r!F=CIP0c!#tPjowkYQ%Z#t=khjpK&GJGq_+6k~(Y6_)_mFS4 zSkLL&!k+eA91HseY(JiuLQ1yriqU@hFt5<=p9VL2tp&=CXVdG=>(ABZ4iKPAXV+%f zJV!t6Y~)9ASA7zCJ-CL}Te=Sm9>Z;~!aHN=$#veWFw$^BcJADXyDhbmj1G^QTQ+0- zAS181d!aJ2L5}WO7oD_G1{xfYQd7v4)!ToK*7t_bzYKNd@d5=+-EB*?Zp@dMf=Htk z%fA*=0e5XE;Ox~xz!_lxqj=CC`(Y=v#^p-Fjp*&4Guk1FVk><0q@Au__z&N)6P{D_ z#YHy6E`$6u(BJv|hts8G@X3~#;avP#fEdZt)_i`+1OuHp@Zg#iDY?} zK!Vyztsl7Erav>bhj#`edCN97Gh<+w&d}{V5WrfmZM=Oy`u<(wt4aoI8Pu#=TXLdj zI?p+=d=7mQ1u)dtSF9|}d`#Y;3g4J%MWi^1rH;a>B%rfNtf=d?w{ohj{T&j}Yie(o<|m=7P}0_rjKq?tP7MoImx15 zDm{`L&u_h@EKX>szGhh!HQrxN$tr=Utf`4WPjujWW%WT zKdq$Gj=E|y=>E)c#W*h)-IKSD!V|h%$g7(W*oz%&%ec}>gyNyOWnB9{R&Z==;@;I2 zW9^qT5f5Ll(nkeNHOJ~~D;LfG10yw&4HbtAM!n27ar-|+>F@|^1@dHd-8?I^@ws~r z{bp;m^ZYPdPG-d!ztZO^>$;!m>}(%K$6b~xT7!>Zh)tdu_*bbNkA=hQr=#X3-CO>- zZ;hJEYS9olm!#WnnsM+nAR}A|w9uEwLFmEKm>WMF>i6lV>E<{VD@Ij97GpzBF>-7Z 
zQc=(P`@?>{ycXrtLT7$v^)7!wEyD@&YiK9n;=y^kusy5Kr2x27b z;1Up+lkPs2laeYQWf!ifs3>kpUutx}vNrS_$f~O1>MC-1WC|EO%3T}7xh8wUTEtWo zCZw{YUzx^DOOmpt6Kz8oG0L>8LmBP3Tx9R}p{p2)VvN`3Xnj8N8>$6TxbdR-d^~aK zK_k`^TOt@}4)nHG3)*+PS5GVnaZ?{z$-`g>5qj%w-p!-NIyH+#l_|v^I9J!a}75g zI*nPeK(ZfvyOgy@$4($J>N`))N11;)Cj%%)zRVWxf)ITdX_#*ON}()Ku_UfBvQT+3 zDi9&QcIMcU&%_wJ)7!*R?F@>!I!cZ_jNn0gYYg0kdUm2LP(u9kMd}e!((%__AOmN1 zrESpq*5;25^*zI6ZBge7lH2d%;(-@8KPl$YpjNquGgO=^ci$RE8k158qbn+=EkdR% z(jKr1X~X`sQx5m%==K&$!kRD&X`=)m=!Gv@6{e7@D;*Lk(JcB-t$~d~3v|v`yuVNV zcf=D*Wps#ChbD^bum5 zJ6zrP^+w|VueI}xhpX$~eUuO)Bt#iKdI?dZ4I;rrv>=EUMDM+KqD60mMDM*DCVIJ* z(RdCu!&zZvY=v)k--t?T!_)&fa-8HUBK5O0`x6l`?5Sy}GF z{H4EOCiT4>V4S@)o^B-m%Cic7#^37OW15*z{2fvXVX;Dp&RooUdhimXtxhVeKyS+p z2QKhZoNf56&MtwumP4A^)GwW9M1ToP>`o>~qv!YffS=F6^|>oq=Kj66NhvZkbb%V? zi7&;+$vJ#zGE+37sVFlsMM%}lj|BHbhP4>zgG8vtB(6c?j5~Mg5^yW;#d}g`TT$YC zv$;RO=31^(^(;fl7bSwT-1*8lpV>nRtnW(niiU1n>$`rp$l6zO<3*pSyn65TMDDdj zJHo}|1c|laBwM9@a&JF_%+|Bb1hD5TLmw!ERP&3q?mFsF)xHV6qN~wS{jHUNiMKf4 zKAXJDZP=H1v3ohHaJkY9_L6}Wkt6aUk=LJw=*G!fEi^7KmTjIygR&v3uJK*x?#zl* z*8^FRDj@FEzrAUj)BTojV9KtD^S!Nfkk|R9B>Z;On4^BXe~Ut}zFrHC_qJ8SAzvTF zl@t|HnCt6RqGZ0UZfffJlgne%1Zayp;Lht`Pfkt|`DZI6K!*bx<%0U7Yyo(bKTd>C zO8kSc>~};qWll0HkxMNBh6q6qstXq@J~qlr#eym!2VqTQT~?(pz3R)Mc9)?j7%x`Y z#>7#sciPWptu=n$2`d;b@9tqNIFtm$P)M>ib%PONy@X-Z-b-$}EEp*ckSB5L{qWeY zdOGV`%CqHpLXhJ-9-eNN;pZoUwT$hJ3@T5|(5s9dJID0&+biw|*{CBI%r3n(KWQg& z$C>M~WJzxye+Zc-FO=9>j2IH3cNkTW^9cLfG0u9pnm4Sw!2Vf%G1Z}1wJ*85UikU8 z=yTlB#@*=P*gTVr1yZ{1Mjqu?MCD4b-owx;J7O4v!J-Y7Srx|D;pw@>G|Y4QM7*<4Z2w=2Qu$|cNx6D zKgF7QP*~TlEU&txe6LMzK6e~u-a8Ey&PEA-j2nb&60BpSU`-lF<+gvd*Of3o!x{=X z7#Q^0W}X9?sYk)%2|RkA>;4`sf4+}54`Xd#T(1iH_4Uw7@z`U3(DNS^xb$Kk-^HGa zh~)JcqhDG1EAygayO|WVs304~O&jfwPD>X%MF<@w&CH8x%IZk7DWqc;0~KHGDkFqjT7rCQif62I<-G3!JA2;CYdK>jx8*^PVT7akIqx`IVIw zI-1zPD4WAn&%voqgNW}oYy``~4?f>(X5*=7ZN}^35D&?*c^uVUAN3VrE?sF1v){@Y zlw)72q!BQ*6`GF$;xxKu(=qOO@V_ZaE*CudRf#_W*dL9VtY@Ko#0EhRIRsN3bf*~^3M-($bLhyM zR|QlqmeshQ^P0WA=Aeaax8!)fFS6k=3LE`5V&96rgy<)bGvRsK&x0}!U& z>tqDWcVFgWr}IJf(O1IfvmX*UNi4p-ZdGgTE=ey-g(4a$`EW*2v90Jttvc3c7ErrX zht^Y81c5#{oX##OyTqtO&ayjbp3CD5$?qLW3iV| zRrKx4shZZe)Y64ldP-<3Jz&Zt^u@@?$kA*c3ul#8QZVArd)^A`go-?CudT+Iy5brc z_w~6j7nw{JGi`)+d!vf_SLz<2z0fQT*6Q{~HR{D5g|Jcs>+j;EM>fdISXZcp0>mKQ zwMkgPW9RdqABAcHS@@%f zZ4xhnp{BQ6YM#)|_Fjb9X}qr7!M)W$%0VVx7x&=qY}sUy@O~R=Zg?bHt~2@^R#GxB zAmT5`$9reY(P!+oy%`%BX`Bp7a2jOo;s6lHEC#tb=YK9I_Fvxu)%R32Kz0hxY>2h) zk9-%QS;RLOTrT2KpPS^FiJ>Ea7nQ#Ggju0^2ioH;Nji^CygF){9n9*+07ZB%j0iIR zYZ!6l42*T{2t(|J+Et!hTx9mcGcr8?VKr4W^R(7X@;d>+xbA^!$BXOimm#1i+w{#7 zVLnoBF0N?jM{U5|M(+eY-a*_F8nh|G1kB9lTMC*Brga(qs%Lf8ySAJJEPT}?3hwl4u8 z>D*s?G$6YyyoGJH`VGhI-l|VLA>e&Xp+U|LKllMtHS$+LgRmYkeNDbYO$2#ko)p|6 zdv|C3`ix#d%`P=M3%4yeGu`M@1Z*WiJKvF!Kp?ufUI5;b^C{Pmh`x>Wjg!~vLB`YM z8Ql(sZfEvIZe6d6O%)M(=>aa&McMS=?O)<6*}<&)r+(QEC0T{GNzI?A1&yD=d5jyp zO0s14o|~;ZvA&n2DjuUL!#C-H#!FCUl1t2e6>$XsZ4aV?st;P49nDN2O0~*lq}}|J z!0pI5m^rt(k5*_jZA((l1GVT1+oDt{sNe z{tlyOnV_LcM#CFm-ZgXy>T}#jkgJRRXry{4;tsREg5EPhKh&RtODY;sXH@a z+cJ9w62hA%(eL&044X0J=sc|xyRlCrN319?wIie0C?8@&4HU;rPT0-?z$%mxzYtI1 zejBl9_B2-1cgYCv0f#rBB*(djczb2&;4=*^;<~!2QD?3(}+@T?a zHsY0{@vqQ-YgWO(v2i0r`~gAAdd7v48i77-$JiH zgF@o#(@-15R-rkV;9?vvEoA?g$}fWg@wm`Rk?INWkIEHmXf--8ox>xdaqoD5ackl9 z&vGBvHbr#5+-6pedeo6uujdj}CpT7xXm)DuoWLwuXrN#9uV~zmW>`6AYPMVh+Q|(3 zQpbza`ISwJfZc#~e4;9DQtOLQM#7>B0{^N9@6XAk#)UalV(l#;*n?(<)JVrjK$ybq+ZdL)f94t|v)0&Oe3|AHu$Ds#!l*oI6%5w#5 z2+`;~!M8uwfS8Ov=O{`&rvI7mv&=q|rLwi(ZK7XVV)`Y3C`wcYrkBxLkY1P=bu2_L z#@S~E&f0$gxOWjtko%%P+?~gpc>So%zG?232zbZ#)Q0_zezh2qUa;Z+HolVCPQ0vTZnB@XczUCjIasKO0 
zUcr~(rb@up;_&+m)IrU(FTr&=`J+vJO-}r(hJUrNs>O}&NaM>F?g|Px7J4l>=2bix zFjII}^cd=`X4z7^N%(doVVdz|Z>cd1cX|ETb9ccl-D*I6?`apzsnU<0gudnyEPsEv zhHTwA+vj96_vwr~fWX;_jy5u6_qT3`27=a)+Z=J9mnSZ4B&_y01&1#Oj}qqgbtjR9-iFR*6qaE9X}yXu=AAG7p_OY}1Ay!K+G;t&+@?>T zEi&>lP{CyM5wS8$_`@t;UuKE{{bDX7bJl(`UI5agEO_9&A&#OPc#^9BTy9T)(Pp4f zF`dza7sYy*G(giLneL-mSAC^$%(?CUu|Uq$Ciu>TMe-b0q7NMSYMJeD6gt)=qtKKCCj+LT_vgf&r%8R36{p&-DW& zTsa*d3V=r16ssosvmpS&JIV$U)n_+h*EK!ybm1@&Dxz?J{>qP$$DEgHah9m0Tu2;u z$3FPMo+gdijEp8a9lBV-#!@}#vozxvi_umzRH*w3eK~jgwkzw&G(e50t{sLmgdRau zrJyGLaYGl!l(=nFSN2t$ID2s>B6cUol)MPp8T)h^Ra@l9Ay;~(YD(##UvP`+)v=Xe zmw1wf#?cxo?vBiB@YYnXECw_$2yo=!xUp45&r z_ZoiNDGYRm)$UId(YJ#Dvh8FgojXxO{~1&Ez;es6^Ywxz-06B@%J+IfxU|Zd0pDgy z4Cv`?{3$4Ki&M(5)(8$g*ouBH{D@LP&H|S1U#IyE67^JtITM=tc(~P)wnj~!Px38_ zTD<-dp0S=Ewl&|4zN@dPt?N@UM4x>BdGFbk))h+|b@;Z@ez1I~0A8ujT5mdIMKNDtKRZ= zc~J<;GT(%;d5si~&D*Ob{dE-fvR#MB{BS??lt*swXA$4bhL@UiS@3H0u=E_{7WcBUXikv?5u37MrD^jYsDpXKRBH z1_Z?EA}LN93Wfgpx7nvb9UxM;-+BDgA!3?fMCMArKQ3!=ulJSEAdi_+1hH@t=RG6g z_ktf^mcr{9m_&{ABYGMd?HUV%${2y4Q=B-kiHvo%~Qbvcv=`<+PSzyt%wsnwsMAQJ*aEF#7nz8^1sz8NV>U z9@2i?CqKxs6W@>sZC)xnVCcBnZb{Txp@;am?`wCoseH9*DJXDykB}%TsPI!?KZ;d| zY}`#U*t$?mh91~)^U*sDa)0mOg3m~>vGgfDGoItt!kkvPslR`3b@; zL=goAEbM@{r6T7O79MW4Gg~=}nZ`*So_0s%gy37|PuXw!#%trR#P+n4q|WMJox3FuhMsYW2)C8e$x9v1dq=1ftn_(D^SuT zhToIwKKT@M0qEMJK>eTX*%rZMVqgKLkT-7#ygWT+U)nA)asD%wtoNR?C>1uS-Ej>q zNQF_S7<*SJ-<+j9N>ra_v|=405M!R@y4^1nnN56K1`E7DH$HCkcdV7W29iK$DswQs0X{{OpfrxhQ67+K4Ye6Q)UR6#)TUe79k6~|T z_Cx?3+pBAcI$8jBbaZGWWg_9%`$fy0f$@0Ph9Fg3prIB5JmjXIhE>75&QD@dEP(K)w2EV&ZP_-HIov>i1m0>FP7G+ z{$d;zK^2+#$6TjOJ~FwD!aNS!$oZG==Adc0`HuPoICCj^+a?3ei8eH0-fEtHYOYTO z1TPws0rMi0v1fZUl8n7VS$n+EtE9-lct!?oC;VJNh2mzFiP z)kFKMuCbM+Rq8L4M>47DcZ`)3f3>fVsIYMcnY9Y8wWfl^e@YjP_d&?gH#N+#g#a-Y zj^sBQtaTHc#iN*h)>ZKA$qz?*#cb|T=5{Myvrqw(`<>9AMUIbtj~m2u**Y{wO9N_= z3X8ss{ctxYLcd@4_ugY+PNxfdek-R(iS%WVX$8(mhd(+Bb|&TB41zKjvNC8ROJ~wY z?A*`K--JC2C?Od**Hxra;YGz_5dx<$x(5(F+N<}F0Oe-T>Z(v=Jr!*@mw}nd*~j6sK4}-W0eQ@8?A&Di|a? z4Iqwm6#<2b>O+Ma3ZW(-dfaB~0yjX)L)?0rCT>i6;Tw;Vwr={NC%X|i>=7hd4Ty+{ zbqJyOXN1kZ`(rX9V7iYZLGPJe9i)qA09_~-@f)=5v%k#R&4H$y3;2EYQBpcyk>(B z=O#i4UIf(M%SPNGYfY8A7I~=4^~9(Wa;GKMytTf&G4lCKotoqQ;&UX)(a1n%HKsFme*LtiF)Oy5DATX2rsbMair8)p<_T(?|;lop5D$ z`xw2z6TspWj7fLC#<(q_QlyzTx)({S=ougVBz+I&HC&cB_AqM}(1dH?F7^or37!+i zL1_dQ?{hXcG1H_3bmX|P=xM=6e(mgZr@KVQEZIg}W? 
z8lvVI+4^bXKDfhuWzum-35_)N9R^l>O znefk-RiJ_{BHHa;n6Ro0Bj6W1?iNU>EkWUoZ=Gq>54sjEMPA@1NAOw4!ZT};gTb};RcqZaTR?c_z(TnwmO0%m*ncDO%!qgyU32r&CgSoq=3+ z6J4%R|9mEtXB?oZDjmB8vbH9EekAb%OXd3{QsDxtGPbUG+&PJs;iXdhfyg*@1I0*% z``o~!;paZ^qG&8aR)@S}i3AC)QQQ6d;(ZA*u>#J6tmS!~Rh`K5)5B}3C*yWw?CP}e zni&W$FwsLLIzeJZVxpyJKQ2*mn~@T>zag@=wno)q*b#D4p&!|($~DaI*dGk9v(asD zF;4y9rmAQ*_u+NbUOLoGZ>q4ut{O-Lspe?AjSURZxL2|!e6W*2@CFEZwLGECXZ!yFka)}aD7Fd_#)EY3W6FYAo7sHXLgyimWcTDFrjp-w*lWQ zXr|0+JfmD)pHpEUI_GEaZ;Nc2U|-ofD|a}dA2PK?!P>a+AMNH>`fz)^({akxZ#IJ3 z6vhn=tz$bg^mhOV{$S};aVD_3BU<8w1v7hkxvHbAJn}7RDD01JWMiR%n}_}DDu7}_ zDfhDN2K0Qq zO*E3C?duFa z=P-ooR}FYBX*PV)?SEJ3Rlt(0C$Cso;}i#U3YsRPzPT$}K*F&{%@`k=oBt%r-UR*p|UVn!Js{>hGsH8Rp)*yMPa9265<*V$Y*p(e2EKfg&dN>zQO z<*yo#j}b(C#bD~&!^fs^_TvSBNR(xsMxRS#T?3GbxgVzC@y-)Z>?__DOSwKR+r6No90;wHawb^TVt~KlO}oFdKXZHGzeyE7 zEz;((XQpm?>A$&Zy0Cq$)GmE9|Kkx5HG^ifVwY}7ay0tY6Tqu(<6BdbD~b9S+HvGD z<=@qzg+$u*qh?GGf(>WeKUk|BUOfeLwiLjE>^ zvoRGpHO}UV4>y&{@WSz8cJr0hFlift^CFVjm`xMP%)uXudlUU9G*icsPa())O{=7# zDT~p!#z)_sTX|meN{kHQJlayvNM+Tg13-IwA1Mqq{AHtRhelsgBb36CUN5WruT`_; z&6qyk2EW2-mYv9C@r$!}3cI{;DP6N(W86!}8iD1QV=AXQQs@5QK%QI@gT>i_`}=*+ zk|9Cc$TMXcTZLQ)TCgQE_c_lTF$V{>-#SP>W8nUSGU_ecI#uz`py(O*61$z^cnj(q zC7I0S1iN&zhoW)T5$oTcq@Zx#berm!$r@;vOH zGaCxz3;d4)>SwoaIdsUXSSTIC=$DogW}}=YV{DhVsNy zJPCUqsRY&;JHq2xT))%bzwVO6416X#mF4<-`IXCRg*MNO*sLGhS=uH%b7O;nzHWd< z^V4s(y&9xgMhOKFmho z_WUq>QmbJxf=uDw=#p$qgUEum5qQ)@l+@?h!U9cPxiw&ddZGBGhLQg!B^DVQCY}G4 zZ!~d{Z3A~DRHbCXCmRS8mJe}vHCM~TvIqf^8I)>T7yHZ`^cRV*(#C9|NceG^N-9kn4*Xvf5MdqPyo?7Zs>-plW;S!3#&=mEA!6j9d zg-@=^Zfj-<)_!>(38R;8qv9jOheJA*wEj7@z!G#i*&32m*RP+VAU9wD3yu%P2x08H(-`<+5if^9;d1h z$RE>#0iF~BC`c5{KL5|m>}ksbZlbio_gMT%8xpK(C>(R(8Fyy5`L);U%NBQ+SFg+> zdNQX4+uU*7ol_9cDK$>{DLq46k0X}gY>&qCweSo)Jr8&YHzv^6({osY9PQ^sv8cUZJP;UM3_!XkVGMv2m2Rh_qiejmiOT}3VF~Wj zeS9D~**bN*FN-^TU^G`i12UHzW+d~uUe zuhWADh_sy~00@km7ao%Q_Ii9Gw4qDBcvtq<;vHGWMVQV1&&gG7t<&JjX;Jit!cb5A z7r(4j2Z;xNW>&bf0cA@Njm1!`E2kB8avsM2%^olk5nS%w_wd33mpHPf!cxdp3mA{hOKGtLYEK*xk@_Dvx{|BV>|BNr;CIekN+HD2d8%>P!gLy z5LJ|+o*UP3!%-cTFU|)hL66$K=&Ut0Gt=3katDDxTH>!$v$E{3{qcS#CqL!RBnFyH z;+^B%#cdmZ7FJf!jt%|p5?tf(9g^XSgv4Wa!K|=HWV0c}C&UdNw*RWm$hM-e7r_t@ zdI`Tr+$+Z(oQX6`qFE!2U|9s%a(y6k8Z96wn5qn^QrFO!>1DmiJjjY40^qrwWB&7m zhipWCuXjp^kH>!hk{aCAN6swDk0eBYI7W-d0h-oJQ+mUbr6?W!M_2EmHG%0yp+Bbjkt_H@k4x16Q_!K0yewn{;&^^U`an>tN!Lv>$ci6v<-9UjZaY)(-jKbj-pf?~n z6&l0(uO(=V0l)R|{KBXBhSH1J^FNjfYTzGKT?>WhCp_y}t8-U|`y{jIxh$OQm-ww< z{{>p~Sk&*pR5%B*7slFUb;^=a5 zwI=~Q*HG;;W^%-D>jk<)nEtlL5PzyA99seoOlU#N&(xL5y zq;z(Btaw4F#u7||Cr{MEqvr+~--BUaoZx0d6!SI4A>Mu89okESdC-CnUPXE1js00A z-0K-V2N%9*J&bTX{M>Q9>rh_f%S3!pQJd-i$HHI!=IF)kBxW>HeB&lK7<)%j2HVqY z>2|MWA1~^nDP!%rc#sIG|bgb z$IVWix{lY?d962ve5q;{PI(tr&Urji`XWg>0qEX@#vfiIZ?DUUsB2U@PHj3)r_+QQ zvBWk&0Vj}v0?fTW_Uu8bV%dq?`RwcTV-T1UCq=xxut(YM_Bo<`ypK6N?^ylzSo&75 z*XE02b??>H*lp|82=Zz~L%iR2x!LJW=P~n^&iwOPlbVmweN2O$S;m_5pnru(j2=q1ny&m!0uBh2cJ-3A5 zpgbj%9FxfanYfz65Wg53vtcs+`d0zAMO};b(*>LD`Dr<}L5x45K`Dg4bUjVx`^FIx zI5{-S(-am9E(5>gvQ&WQ`=-X)p5ZOpFdKS1PI4WeA?6}~ zPi~_<;Ib*e=N*ppNvi)=>a<0cs)X+mEuYgXnm9wuWa`>@?i*|=Uharn;Qq_&Gm>I9 z9Jl~xz*}ec|IMC-n0L56br{;e8r|n0Qrx>Z3%K-RE%C=P)1*+z92>xZgiW zzo98Iyv59Eg8cr(-JZwIph<34Ny=_lJcZgyDI226Og%4%U@Ybn_o8sj%T=FsdCGdG zRXXn9Twl(#Au+Q}(-x9Zc(0%Zeglu+shAo;qZyBBw|97G9Kc-e7t&8?Yd~=WTt{D8 z6Ay}i2?~VQd9SBve-KNe$qI7defQK52JqBw`@pcqDXJuB;GuHJ)NW*I2fo>iyV;zQ zIS>gr771OZo=oA^PL_#WS|tl3GG-K*_(KghaUKzdjBoKsuW4QSp;U|3p_&Y&XZa4G zaX4Q5r_^K6HNm*_T*c=3nwZh=dnS7%SDhp!l~(TBC%xe*Z%rdK%;KN*SrGTEew(rH zGpkb2RS7_A2WY;uU*bTX_lZ1>*{Ci1sHayhg~B0t07e6s@aBy&Y_{64TB?3ARz*z|P8(z(W!GCS(< zVoLgYN>v*HN>*6vL9j+9d7t~ 
z%(soNKP!GKu$au4i@n;CVE}EcRQs-Tmu2{%p)n%#%xSmx*-*#i$;>g%2@2;v zE_4oHXI!@R3Qw15t~sIsZ@KWuDEGE;|Ic^dG2Xiqf9<-SfD%BTSs`6q1;>v1@;}Gi znydncY8Y_s*7z=>g~~+4IfQq;S+NS#G4Fh{jJ!557D>=_ZTnG$B0J$F9;0Gl&_+AT z2!i5t**dP)UzA^3ego1ltvO?F5>{g63JO2!@LUhKnc9_+&_$J5{ze__`(*xdDX1Z3P*H`2gGp=i6UY zdFpy(X_ymjcH4V|@fiKh^phGGs6t>_N~rP@ZT?7@`Apblz`C~ox|NuGaeHA>x1S?b*9knmJorzw_(M);@~_>VFy< zihl~o?PrG6=4=mW2(ll*9>TRRH{XXbkYEdmiGkED!ITUN$vii&z)`Xvp2-&jEu9)| zXH!@@pCgGlvyFHyQ|~Eb{i4zDPg>7(3}odM26)G%m5dH_n4hH%R0C_v5L>oUOl!z0 zGCO$Zb=~vXp5`QP4sJ~tPtg_0yNws5oB4|55r_omd372ba}q7~ay0e`cC~vb^uFtsI>8x5iC*JSwXshbClf+(Z=p zae3NVHT%`4#;VZREcunvp0geQ{Pq)l5%sWBx6qGf{>M>|sA^Jp#G;|7_=O$N!(*&cgZs cwd1$akI~QQN*p3J?g2jvaw@V_uZ@EL7huU(8UO$Q literal 0 HcmV?d00001 diff --git a/.github/images/time-usage.png b/.github/images/time-usage.png new file mode 100644 index 0000000000000000000000000000000000000000..7f641c48342c9cbb0e96f18eb04cb3a4fc208615 GIT binary patch literal 49567 zcmeFZWmH`4vhUm7SmPGl8Yeg;xLa`d;1)t~3liKVxDz~RAb4=M-~R10FTt!J59fcSL1OlPU%1Ek#KoCC=2)qnM1g>z6 z&#!?%Fp#XIn1(xeFB7Td+0^;>$7!9ss#l-Ke<2{iKdUm9GL32tAQqVZrCNm(+fyDc zh8+c?3JWL>lY5Z|4U0HiY(5(eycB#KyWAl%w!b)jzm#=)-NIAzqW<9^?SrrL zXsg?UvH}`dq63Q^{-0kFDE^O3UuIuhp#R(D9%#;Ue@bI0Atc1_uP><(KkN=FYlqVE zzh8X?QbhXu^B6ecKZ5Xe1NB`0?QxK|{C|5?h-e(8F??k|@%$I-|9k=Pe9_5&YeNJY znA%TjdRfch?+yM4O2_z*RsoGuF^3X@nX3`&cZmPh)~6=J|8V(RFVXy|Bs!qRaa;|W zGymE5r{@>K{j-CAeLW+ByaGj*b$)I7`&B~lOS1nMFrd3R{aEY}9dmTMDXxDF|5|~LW_48^zm(5E_JVcS&1JGw(&JP=(><1t3mmUXx z{e6S)3yDTWG*|;-+#toNkd~5>4k!Es}FGZ*Cx&4=h zYr{5;^}5|{Y<5U0`DYY#G5Yj@B?2R_6@D6OKfGB|ElOYu6JuPp}tY4 z9;X|P(Qf;*-?rmxZ~4TRtA|(0_|^ zwIS{|+eJgY1V($YgS5~4`ygSOJ|pQ|>+inWF#Da=U@h>*SHK_b)r^e}CL|8kMF$r3 zjtBue!a2sO?3sP@J~vP7cex&_O?ZEGB|a$(k-MXed?_jm3yXE~mI=H(sYrY!drn9D z0(tqv3jIbk<^2X^?0#uT!@DWQ-)}zIW~^f}ILV?^Q~EikQ<6YZWrer1V!WvRUK>*T zrPbOw25~@AE%!;KK;3YGOO9=KCqvbb54S^gb9PKR*p|bolp>FJHFv-;yJ<1=A3Bc- zY2&lJu2qftaK1x}PW&={S7y*WAGFx;ve8x%h#N5=}&_!jQ#n-Z4}@^5kKi z%{NZB<_#Z2G7Xse>$```_LQ&ni?1;R_SilCMDg6gNHUqqO=bk`+=HUevOXL*I>X7H z#!6sd4`M8~VP#)<-R{>Lv<;8kE(c=X!H_psz=3x%aBg;`T3_`A9yWN|z54a6w_%evpfnX;WAI5j=dJ}@7FMsc@|s}m{WeV>wv>_F-eUa$QV{A zTQ)}apb++I$Z%f`KWK!JP0`O^OH+8)PQI9}P|g-6NjeUt@QHmf$}92ymkClRdIKi$ zO9sk}otqgWUlz9|@A^GpP7e6^AMZ}ri?V&>phGc_cbg&@(mQvdB6nhTt>;>y!4&Lw z8P06#nL``DMcbnVPh@74kVqZ;T5fhq<9E^?wm!$q3+f1BhfU9A!-Wa|IH+B3aQvg+ zaDBQt4LaWqFI_;YrnL(FzI{Ka*QSfysrAN?Y8NC?!v2R(vr@34&7_cuQh!Z zO+BctPPBg9k=+E1k%dLE z-?S?T*=Z%1V37R7-pUzfX6K)3m+grl2lsYDqz>23Ot<$YgmVf`fqY;_;73lY8_Cxql~h{`k^l zJX`BegQ7rl(e+DgY0{N)RXRLGrippWY;S$~pij?m6w+0ljR|eG-M?s-ZmgCHWSzl0 zu)yriDc;GViqG*!h=cl;Jf-6wOU&W$~ zYz@m0ZNwcJ4@zN~!u=uR?|?(IO3Ep!ALJ=+Xy0=D%I)4CiQFM5%zxA^fg`pO8mvUN z@R@A8rE}0XSEtG}_-C=IMBOh_)D$`kyWiCXB)oVOGW2X{Es1_iaC^Y2KN!sI`H9Ec zD^fXcUD2vPCqX=J3uX3rzda#!G4HG#x*o~awgp9ve3Q&(M4tc3gR%$BJaIq!@vi7Y zC**TyAQIAw#*7v9S%B9;>!o1|rssB~FdH!a>8R4)49bMx*}=oFHaWeyVf8h zuGbPU*u10foSc15M|g%*rOEL0r#+d#K@F3bVvFwKqOdnxk6L=R>-`+}KalA&md~Ee zYbJuG%XBA`LWRRN5vY1_EyOL`t1RMZPd%{v$lr`r-2Zeoth?g|#YML88#7 zljBv15270$<&?8$fq)4y0>Mq{yK}5vul3^Jy|vE@w3;kLSZR=60(00*7Dk~;G>+>U z!W37jjp5maL--VP1>s|rCVZVew{k=0C8RA!Gl-Iy${V`kB);XaV@;}ZRNjh*4*3zb z9fYpv3;aBC-Dgh(O?3W1-ZI9ww=vtr6(U z0;tVlMZ9$_I>pGRvDApSJxG+Q{1l_v&!fA5)kUd|ohM>c4oTzyJ6g%>Y-`--N8%x8nQ^hk=;FsVc%{9{QH64Q$!o7r$T45WPf>JeNRV zB9w$v%MZ|uaad?}o00ESMG`=yucY=hf))0V@jB%AdVF@Sq?s2uY{8xHX6)sfIG3H@ z%=SGf|6I?AX0F3dr<_%mVm_L|H_F+aQR=hL!~q!?9zUK0e%M?xLUM_@Zebk-5ZDcz<@@SLx<)aZrI2mn(GLlIiniJ(A2I zS#&O}Hota1dj5hH-pB<8!wl+7b#Gw&rsVnaWTh?=hyRcrOe4fkN7nLExLj;7*g`aT zX_$IgLk(LL5n@!i7+Z&dHa}UH6V3R$wgRVPd$P#d#jVF_cnAdl>Ufemkat=P4^fes 
z#-wvUG*}Hm!GkXx2hpm_%Fr`^q2xoVU+5%6esD8^vnvO8g*!MuZsT)|@J_smUnprX zi~GDed!Q(*Ufpo8roqnax6<3MkIz2P;$f+tba-!~=KCjoBQS7Lb>OoB4{JQ_@L~${ zQpZ1iVp*Pf(>C`babhXezJMj}wD`)y=B9ITeL6q_-j=AWy$MR$%@lBZzwLc@{GC;Z z)~_Weq`>F?X7o$hjON(6TcImmOJDHOZGe30&HW#WM<{C%mEX!X+LG64ROC=(INO=C zAkS|s(X#nA(JPq17HJ-TlB{p$jt5=l2~LO0`svUppEu1I3ma(p_D_sR6HL8O!LfZ? zAXP72;RtjrJ|Z73eCxRuoJzo&a0R$~A7f0E)EO|r8rnhllE?(*tVXqy4+qVM^A`Fn zf~h4BgsFLw0zVxHN2HNqf$VUk*Rgbg0{EET)IYD#pe|QBAMZHFv5VOWIYGC{iU%Ap z+)z%+hRpky@tKpkuVgtP{ce%2A z0~OsX-v}FX?wzIEE1LW;XB5*Zo*AIeCe?o7%OpeQcR+C5a* zRdA0&1s@|+zsk;&jQmgn&9Psg%ZUFCOxd+;lXD?LPAH@C+GEK#g{q)m!NF}I7t&70 zf(c5Dkgb%cQ=1{_SwRAwvF%MOv&R^|XvX2(4RSWB7KR+ucP#X1Y!5#3M|^BabjR2- zUK4+cET8vC<_N;G+iXvE(%YEMM87hq;%8W~GsE$^igC#cx_&E|YBJ8pAqP4a*5cd< zS&@U0Clf9@Ve@N+k(1FyzN}1CAuoI8L!S2j%Eh?HELyq2@P;IwkT#LlhGo!&ozCE- zNazLL$tluDjs0Rcy8V*8z)#Ac8Y?3iB}ei`re|#CX~6Gt2fHQoelj(JMJHmjIooP27FkKO>r`V~~V|4Uya*N!`W+ZsP2= zhx-o51T`3R!kP9Qj8^Z=4Ua{3`gnG&$vlZII+d$%v#T-oVn03tbYt;HaPEYIEk?>` z`0$TZIDu!DHf{M0b1``oAxGCp#X2qcP8Z7QyrJMm!a3^Rp@HkYac}0%kz8o?eGluG z``IXKTkt3c%;Hf8whS_<`!AW^uVADuJU44fxyLf~M5(_u5qGv(KL2H7c{_G4&Kiz{UZ z2Wb>JgKKiR-~#MjBL)S$*mUQx=`@L~1Z+zg2-qVj3Cp@zm)SX>xE;Ws;u^xosnkWn2y;ZFV@mAe+WFM?&Mrw_IRc~?Iunam@-bXuu&4{EhHN7C+e91bJ zRSKJ?BfX~zKARcN(H}5s^*ul831{x^>)pucZAye7=WGnc@wINjN5WxeKJxn9xr?M` zZW01?fe-Peof9w&oM9$kiW20wdDwn)-D+h7vZna?}yj=H41r|~w}GO2`b=eBc-B1IkIZpR>{Bm)8o zQm}0o)=!pjII#m%Saq-f(9Ho~rOHI!`I{ZM^BiHu9o9>N)tnbVTOEu2Zi8uR^yTDr(Te?_TXa(1}FB z48*?9+qf8)Sb-&@-%T*kGvNBWWCLPHiShdCr33X50Vv_yik+^ z$-=pzxta*WY>};ylWih^}zHv+CK@y*LCFBEISs^rbwuvL9CRBw;(l>ogQv{*xiKs5+C$ zj5p$c?BN162?gr!&KU_wp{ zU)nO);fH&wx}Boyx`&&iFF5wj;B!W-bC_P@TfJRwu988d=bw!bnm4mPjM}dE3d$_J zmdQq74i?ltOKD;EBN_wEAzYUmxO&vIVpzPW41HOf#R_7kpn2?+-mD1&*nEnyT<740pZH#-XwanP z0R<+HNot2O|E$&RdWRfT2@Oj@{QK*`Qa>f$7FdXfPml#C7a3Y94hgr-);x|+`oO4q z92AO{Ne@S7^)7VH?Pop#9p1pnQAKcgd^|K8Sgc42N2;nBcYJ=HYzY@pRDSt^BuJ)X z`L_DIXoxO-56c-((<$X?BQpy7F6wtsX+&*tw6y2Z>6fw0`HG%Up8<{yNe4TRMsu!! z(X6^o+}?xr2ZtdQ?4jY#OVwa32!w-q^3m=y4|ADub+f$`EgG+ik`d=rqfHjNVH06a zXSQWlV>1@l4G7+QLl?b5eABSnQ$oXIw2JVHX4qy-tF(tK4aHgCjNXKr_ZyW^-lMXK z#TK+3#&_B|&*0f7`FCG%z8NOGlodE5%oNA$&Q5`QR6vGV1opFEMmg2w(2u_Qk%*jW zma9=6bqjW!4ej$iIDc{k&6rvNE_X73~x75GO2NubVdyWxHpNme~Bk zb>o=zwaxqXU2RItX_T&2qCI)WCj~_Yfn<!x9jL-lg>4ozhXELx+{j) ztta8aUuDtamZwDq^aeE2pg~rWEcgb7aX8>np#H$LY+Q3bMtWE?_nWp+JA0xkR+l*E zPK^v&Lp5jsUdDuDAV2pv&g?HJ3D9S9?g+erzh8}bx+;}tZAJ1oavd8A^kBIARTUM{ z->#~}JvoN{tR&ed$M1v_dirWFED2LF{`FaF$6&X6;qxMzpcLr7^ia(wk!)Vm^XCF3 zbJ-@w{Qvr>xbuUMnYDmf`MmBCxa=?OzZb>!xO7z(RTiay^pDREaG!0Tec!Pu9*-(rXt7Fxi(gxm@MsSf!%KKh zqIT4;O23n5Hb8G8QwfAKAxVje{k2w;=EJER{~QSoS+bm%&lN9#DMI#l`Qt`8RnTI= z1<^5FlbqjK(kS)&t?xA7;K=E?Dp^15b-pvDxihlFoc?BV5Ly^FVKnf0*&n9QKm(mc__~UfA8f^EO8R!$$4iHAL zN|QQDMl^rAIFY5#q;XsHuxxw*aOFov07Ds?-<_;=X8gnyzJggasTV1^8dVT++vp6k zm)M!BWw~4z$qA&)oXRR!f7PZVwEXl&Dzv_>JP(_-?JULnp;3W%+nmVx?YHHaCH=7^ z$*85qLF%Q+r&SZ2_pG14m$0KS?DcQB+sB7Xk!v6z+L9x1%NY#|CI0LY0BAev18!0WOuDG&qk*#9~q0Af4;sV~j9 zjSfiCQA<`4x)yf8N*_-IUX2R5+V1CbQrK-fDMIo@^u62gltK=?djeVWp9J=66P|Ff z8-N_Uxn}gK+wb@7*8vin2tYjAXW(6(R?l;DAbpW6bUCk1?!N2~PbJp$#GRkg9o46! 
z0z&|4q{QMkDpby91x#tVP5b>tbvK4UKL9+bPB%tS1A>rYb4s;u$HgxeKiH?R9jqve z+`oj54RrX!WzJpkv|mS&daT8b3Zcjz8{|;^oHlFZ$CckSu`|~_I+u7PU03>$@&jCGJ2Qn0>C+N+7 z?N_U)+S;=L9x6=*`_;WezQKbnY#Adg~JTi z9~+7CQdL${Uy|-tFnzIhV_1BT-$P~Y1U284*Y=i(<9r(xOgqUFchsx+~kkdvuu z3;8s5lK_EL6#moU7i8FL#9Hi#bPKC*0Jdm{)p9l9f@6o*b|8-VmJ2l`wgT8{}{qFE??@`NzXoO1zBE9{|SZ!giB!% z@w*f|kF){X(JtW{*g|54p4+r;$_uwZBWypi3f^pe7UOuE&9mh7;)=}G6rLT0tU1LR zzH;aO>@ej8l^-)&I1_O8&)1^TwgG293toY!BNS%0Sh=o@43mb!E}6Q>hC%Si>Lw}@ zXt2YE+5FGW2vqsX&XAi!EAFO98`+5qHAMb^hQ7mVI>L*yVmzQ?iFL- z>4x2(46&gmGc)HfqD|}YQ3fz(d!2;av|VrbOC@yTIeno?cSz*Way1}=18v@U1d!z74&o+>6gjZ}n18t)E(&SZ2JOBA z;VxHF;Q5JepR8B!B`BM}uX)4S_P{|DC&KLi20W~2_T)yHNnwj_++7E;P4TTbv7m{{ z<-D|I76T5>+6R|FTsWa}^|b(0R)>QXm|xf(u>PYn*mbA|CVJ1^_P@y}Ar0g&i{|C9a&sdGk$AdkhM-P7f-1HMqU>q$L~|z$u{oz97FwU*MN7Dp%iR>wo4zT5*L~!w0xRifi0WpWAoH%a? zZ8SnG0!4{Twr{3GBYw8a+t~p`LqyR9I~sLJ@tWAA1<5l+BARSunDZgbQ>Rm-19=xk zONQC7t+x;hIY;^Z3ras@e+0G1MA8CkKVyWnY4-=!&8-3|@`1A-F(SSXUNeF~xQfq9 z>iSbPzyhD(#WuTbC;gRCpecE9ybwfycSGLpdlyTOl=h>`iV%qHZGPd-XefYVKW@KTeS zZa10pFh}P%4SdnBOpXp<33qRTqt)KX1^wDeySLogbjZ8yBRM= zKtt5!CTsVXOK!H`=SLdizmRop6NAyHP1|Q4&k~qD9=+2%ZudFMl@{P57ZW476$)g7 z6Zhd#QPSoOc_2C%f@zn%8Ked(u{P|Hlh1cZJIo_RD?wUkq#Z}F>}b#q6_yN^^OS=w ztsXp%(hR+y&JPEUEQc;+yI6|}q0;w6Sen4mv>JGe5s3!((i~$o5BJ!LfSFR;$orcf zJQ%rH>uWkmOO+F46Ep~c25k?>C6_zja&(kR?Eqc}{zlMs9(=6zT2!#iZm2wlTK^8f z%gzp$j4FS!=X`O%KJ#o7s!=6YM6mKE;nn6r$9@!suJ+-@g(cBI+-Ks;Xlm z0I<}Q*Nm&<#j4cW7-zPf0SLJnJa>}H4)p;9`wGH>r9hI+1FKjmlV9AE8Dyf0?zzcU zY?2GlhWgX{WNpBX1O1YK_0D2e3PAJoi3ni?t~rXY>+SaOPvO%N{{(R(S(f#0;YA7+ zqp~4EQKKr8Uj+M?KpIBU=ECq2s;iBI2ndmB&69J)98CC}r#>hMQ|f zOwQ2CkCo$bLqt-N2CmFv9e@&p;qC%ZNa?viX=!7x3Zrxl*_;pU5vm5fM8DccQ;{QBJD>^wB5D^3VmrW7CanlM zNYxO3+JE;JcSk%+4x>NS=|v2>lLYUEruaKePFg=5aGDunu3suA(o21E#YvIEq3lX5 zTp2#eI8O2ClIAv<0>D7?=dpxK2$x72J%dT>gj2C5Y>)>fYPPp!y=e89 zdd4q;#q`Bq)65ZSi`3TQB6^jEBG&vJoBa(2D||*v+Mn=XgCat&^C|6evIY!F1Wa2a z8K#DyM;Z0t58GPcKnY%SpCTM;zmnLJ*%^85vP6s44?}yQ6v@vCjBvIXzGW@!f!+3t zQjC(PsHKtf*_UYE-Hp~)YiW3>iA>=onc>Ot$oV^s1D<48sXvn^MF-{UDu$dhq3vWwNay*^pnY2$N9atfmG+}1e%rB; zu7HPJz#NZqX+pIQ;Gzx6XzqmoE*kp(GCZFM;B0ppX>hDwPSUp6S=Vkj+}~OJaQeC) zWA(1ZE;Ett_X#sp#h4y=kiTZZd@leW?6!Qae-b)Zs^nMd6cN7a@c~eesHSAcO;eZc zlq(>02})4@-TzJCy5)4i$J^RxK`6(;I<;x-6-eqMsBk)_*6pu|yY@&I1c)GuAx|A6 zjB2pEu+hC66{!Aki~n-E9hp9JudR*a^Y%z}gEN6HkEN1{2E&`@pZg9|WeF5qo~DxV zA0t{O1t=E%1H^#$-`Ox~t(M9ooOc=cIX-V{0k{=tJU2D?- z`f}hrH1_N%kV#=cs8-@Yti+!JC>9DLcz5@tqQu8@+Xyl~+30nkbI+dUDP^X6^iJ>j zX`5)GZ^6<_+6dxy9z|qE5dxqUv#|fE6@%7{`VH3fit)h}?UY{H+QEnW;(xghZx*~` znD37yOk67l5aILs4$*VJT%-Y0vrqCrl9Po(+b`AeIJ(DEGo1BP37p5Zul$qjJ=%T5 z#C}z#vFQ64whK4Bp$~@wBqN-%^nmTDDct{DViCnR;T6y7dA8RTgd_1ab2PFAPX8p* zV`>%Dthh2F`=YRFHki-6c&^-Zmr(F&*ZMl2{`WKs?#CL%n!Ij3BkghMW4y5Q`J1EG zbADI2iO{4NHm4(t=%heI+w=g9IDMeyjB35LQ-FjJKkp~_ui5ZR-czC;$49F!i{=^K5Jj4ChDE$C!sBWr z+x%t2C7_GBBzxOdF|vvKx#y;d!DY}b#(uM$oI_13h8T}Vh12z&=v^Pr+vT>-<`!j& z=X#BtP&4{8Y5-hWX33d^z68+Decm$$%S=<tPTzO{BS3t)xkF@h&yw?CKjWfRr_ zqnP-lf_yp;5{cH!D&*579OoGfs??It4_InG@en}Cr(4tNm$Fo@rS|8BcH%z62fW3> z^e;XZFdPiVodd8}U4&XrhD*d}k!K#|>HU8M?5_Us%rMCiuGzQWzi$MBvL|sDjxZmf zrl=SgOk!G3w=R!cdbp@}TJ}eht1xWEk@Isc1At345K*H}2O^QX!~A7p56Il+DKD3N z+xJ>dN2|Y+e}D#!rT~Hkf!*$lIeRvNy~;p+rvX|{`^6TROe!cjDM>aGpJo2idw{^Y z7eGYBL7PGL8%~*HEoA@XD^G+CEBbJk8~`k=!$26uygye@(8a7zlo-<5t1vaY?<)Ha~4{Rd&O)5wCU9?}>*z-Mxpg;?vgHGX8x_;AoD-l*~H zlh)VQjml@L+LbWdN9FF&m@o7dZ8u+PegU|rjj9B-Yy3Oyu&VM*;tp=o)eDc4?r789uCU4h;)W(3Wjojxd zCt`}f^3b??|80k9Y*UKBV9m~I?uO7&Dcuo1vl*C9b-K8OHdc!r&BwmHX#1fv>xu*p zu6Ky|!u5Wil?>7r_WZwKiqcN812!w4%J#$tIHUXo<@`7|+ms0HQsEbU;Ot3@A=yYu=2I 
z`crxbUHE$$c&sOJdY-+NcEaFW!;+>-1SzF)_VzXc+e)7}{~It_&FsQpQm%K2-BQTJ z>j5;>0CQ1iLS94ea(Gf2_Qz2d7`6LAumgV;J7A=>0ZS#QmNFIqKFv=;!ay6z1{kw3 zJa;VKAgB=;S9G?}p!q!~kb;&bf7{Ms*4N;YMDzAO2{iphbLbm_7KQ#;?Qz@f0h9Ob z1z!Ii-)6?2onluE6e50q2g&tzy9ItgU)ZGYw&2QYFgVrVXvTN;Ljvxk^p~h;XNbsP z&~4`7%dt#>nfu^s44*uDteO8_HjU&tjTs_7Ds zVm7tT{cK!1(Da9}y(bA6iHAxlTI9i9`n@_r_`*zqTmy>Xrt`Zty%a{J*1Z$ID@B_# z%e33IgzDAZir`*yI=oO(z}IkW+|V8bJk?t1L`d(>Gsn{q7GE&9hxke$Zu3i|hMzzx z7>ZNamD6QBXk69>>K5hPnvwk;i;s9z2@n`G)6k&Fbz*!8=3uB~tUvm{5g3G&B({8H zY$-#Jbvz!>1U@4c^@IjQ@`3B!&mVIKS4Z8L_L881P2lXt=u)LX+y-2G-AZ#4ux6)e zgas;9@dOeo4s}t4=!}^tr`kIMG3@>^Tp-sKshe_`&N`(4yDr2yqYD?o0(m zL%BM32f2_Zykn(?sVV?9tsT%Ih`)cH!1IXHCvq_Q%4q9o!>01@uQ#)se|8|ER>j+n zP>~R@H}yn@qa^F;r)}ABiYa^JJkC3hA+w=kO;?ISWqmL9ek0ctbK5Xr3Z1EReY5e+ z(Z(`=)D@R*3~||=9>G?p{ZcX^y=mnZ)+`(tgm8PX&|DNzH+~N6jG1i+jbKvG&aN8m z>)`U5`@sovA@dCbw}FPwI|HfR7%6IF&6Q_=vZ{}w9tUR!)Ny88QN&_XGy!OOTHS%C z!T=a((I~YXolGj{2OMbjCgh?5m-TxWC?8%fp%3TmY*Kkwc}AAw#~*$vRzQ!sNsa*t z>yHt6*y=|Pq>X1b(D&VW-4)?q%EsJ~TZK<04A$85UX@A(j;oIdKqco|lTnJlhfD5r8x=ZxS;dmC^Y_p<60Wh|+FPaDG< zK#k{|l4O1Y7f=USMLM(%{m{370%WzoD)9f3(qK83xi_d)ondElE*na@V|U(1PQ^LI z(q)o_e@phf@o-kU)%V;k@Tu$MvpDTW0%o_iQ5e@#el9D8Bg_cUGAz`Bndfw%_bJdQ z=UzPRev?qLGx6`dMhGa!fYiSg#ueRI z6g#U7DDlIc#w&}}Y4M6wq{I=;-g_es0))l+56}7Jq(Pl1oL`UcK97zEg7QdAWD7%m z?f}hqGea6y$E31|K{WpkLnmjXYotqR-EzYARH^n5AWdJ}c;LW1`DGU4ir>j<(8P-q z896V3@f z_-`OJCn%@ll)eh~{gS5Vu4(46MJCfDY1EN`H6!BTN~-!6jfL-AT=t1)%)n^AF%oN( z<`F9l*sB$_c?{K<=ilHr4L4mR32cqXMw)SmtX3niNxh3fO>1t~_)FqZ$w^roVJl$A=mI^yju(X!@o%jBBnIjXO)4QAPDxoB1M#wS^SM z{Icuy=rhDZlx1#9J8CV3!NCA*Lt__8sPvLkrX5C3lXu!sDr1zxH(38lI2^h@YwidX4cMQsK4k{t0Fv zl&qg!<~o8V&yitP>@jG0oqnO`>u0!;PPRT&dm=;IdH_i;C^0!)MH3jIl;s*Wd z17IR`yNxO_JLvZoIhn6=h45HRFy~>|Gc5HDzmTENRyV+?=`^h09hMbH)HW2$cX6&WyN+M1KSZ*c0~mH4D8*v?Dh*Lo{n$%WRbl52HGB*H(%D6&V4 z9?xCbUelI-nv;`yuA;=>n|}+^s;Fc!c8XN;DeU_voMC-_JGM0m zvGX@QU6KLaQ2P~DzpPOTeWlpyv{E_^s*$fmp@Bz|LhD&|P<&Oq+qBT-;6#itj@D)R zGu9^w!OApab36%n7^5QS2atfSQNnhs^hTxuU)uOo24I&W+wqt2>!3O=T?h1jOloPk zNhRp3W3@pDtmHyQ1)qpHd^i1?&uZf^@?fKr6K|^-kGcMh^BEF^Z+Djwne}2$?{8r= zN#pyO$DNCc{8imLyr3tyNlbUcpc-(SUdkpy@H~qM;gVY-ZhopO^UY*6N&Lp4ZLvk1 zaah4dbc20D-~BS}^#$#JBs9WFlY?sd&}W`TJk7c_@Dn_BuN2dK?$Pa3r3Wfb)gnu! 
zB0tvw-E@S(fK1vedFkgWxo#00##P0DqC`a!`b^yDd%d%E4B>N;``snZ^}I-n{R$)0 zyflYRrMC_N!yaqcx{LMpUGG_?etwkFo+>_W*6Cme0^EYn&x)}C@q%Xk)02#$yznRA zD$K_Fsux==-k)x8wuuG8Gc9$!-w&9+^|<%+howIwcpC32mPNit~Tw} zwbGa-^2v_UQ>e^d38R8cx%0a|_b-jRq;5^1cM(t1$P5Fj4xb)STO!a)I92(N5lJNR zSNuIQ(sGYMr2f@Y^v2=v$tM8fEU@myc2OOXbE96>6%}9}djp;z84Fo~=%;G+kE#g& zPyvZD!!zbpFzL0vPQE;3JAM5~ZQ_rJ$`A8S7;6{ZXg>-lBj;AQt8nf45+=pU_{4Nf z!Cg-tMzmT^#MU#jXW!%`r zoa7IQ=r%RBB`bTz+b(c+^3J_v;`b`QX>zTx99nf-jN9f}pfR&1!`8Xh!lWSA@b|u+ z9?2xRtHYJZX?Z#d*A;7itUe8Z>0jCrj#tND^nU`V<9e~mhl=$;G!q%DU;8$W*{Dsh z^u2;~hn&3pLcRM?&A2O_t5S-uRuVG8Mf(&6OwDHM;4ka0bUvUC!a~yy# zc16X#j4Vt^_M1LT*$NLmWY-2|YJkdS!EFgBY;}qZ6#1)V%*q1aX^p*5x&A}OvKeor zZBGnV2vO?Q|8p83P3D@I;jxj%N9zZ~>Vp$mLKY)i<6@oK=wvsj*QLN&)|ywv{Ps4z zjEHA(;}ik>N`H&TX#kMS*3r_U0}4DHmM&?RsH1tsh40TMVS@BtFH&~B zHun3eV_XhYWd_jB&KfOtB#y^{}WYh+8Y4_NvllRRfc095$pWJ?I9Z=B{6gm*@HX)ft|jA z)fiCRI|$T`(dvJ0t!Ua|oWZZ%1YDP9RgVD4)&#)Fx%7P`Dz~j-&PA=|TUS%HdHwC( zy70KPL1#eiy$Cvm@R$3?tNwp!0kWRTCPz&H0yEcim&|HF48J}%_Egoj*R)$+zMv#{ zEO`wyBJdW=)thOdQFbV`PWMHp2=@@6Y7lu3UB7J11=7DZam<~yewptu-G&M zPH$An=PLRT=s@Z3wl|`8iSSNQ?GaS|X!x-e$t-$N4#}V}*i2;r2m3aU ztK~@gI%21ueXC|)G}*2UetsXld;X9g;tFLH1{r@pP)1DmR3^s)N-H&Ju6z0RhbVwE z1c(IPE2(WBmJ8sTxqkye-QmREd}GZ=5*{VOS{QqN%n#vMHpJ?`q%U7SxiNt=I4vR~ zodMlR<4YN9VCELXK+(ipgDZLPQBGp<*kOdJPsKn7JpW4!6a8Ty0Cu<&A)*JsyC+&a zoYAYXR{|>iuz!haF~F_SDS)g#0b+=OtK&ap9y#~GME{x4jL$jOhUk=PvCQBDNjjYW zR}d3p5w7^N;(xDu8FfqF9L?N)5Z6T#8OJrGQW84XkUo1VW|dCumk#FM{#EsPQhU(h z{(Po61;@2fB7IytjDw9%A_UEmO`P~Kgv2fuz>rFaz!_zHG5lhzfdD!sgr2e=$1TOy zL6o@x2ww(jLea?hHQtT^+QRSNNHhH*ZzYW|7ngXunhl zimcQ6?RE3iTQH^lI;xG*Z%!B$*28amc%^~8mqn?v@^tXxWqD+o1GR~-B!G263JLqf zV{@$z>Bok+zn+=Qr<(6X6OCoZL&QT=M>NAjBt(oC3q)*`2cV0?;)Q0V8hfnRUaUit zez$2@0hT|Mv(ki2&A&DdbDEe&0aVo{2$XUX&y&gGQbjzKzT1KFSu|j;@=M^u?yP?n z=hiPAuY^(n6CFPWU}@5z&%$UEUU-P|`1mS#h}O({SYk=a!q<}AKv`Eu9`b;H24grK z={mYuVGydRVk0Gpad>?7zd~N<|BvM5NZKs{`Jd94QSWV}E@FHr<@Q*Xkid#ocL@4= zZYa52L-Ic68A`yOi2N4AOe~<6<1_st$)$O-CAg2_IAa-GX}kn%2aR#_m9F5?9b~Il zK+5woAkvar^1{uRzoyTc?vE2`18P@qNX*Ce$O?Ksv;}s&H-&0Io_rhHXGLn&0R4x% z05ob6ptRFX+%}EPprLQy!z>#}1iLQip)}R!J%G%J;7#awoIsx>j{7zlQ z;WY9DGfUrQ40jLAdOM~F@aKQ%pkil(7%zm6sfK}ni?FU=vErQK7luY5Jf9?wybK^H zibSz)6}FwtW$lQ{qe07JDW%1~kNbcizjS^H+*HIbh%Br2?n$h=Er8 z048)cSsEq@^y2|ZAWLxH- z2l=4B#?=U>HNbied17OaRktO^X)!{eolJV)B-XHSNo@cRq9Cw58OOsqBE1_{v33V2f4;C|CC=TyMDOA7fokLLQR!D zgR*KOy3*~HK9yD5iK?T*rZN6q8gu;LC5^#7pU#W`k$W9=+Hpi|B<+M{iRyU$-$`Rm zpr@2+^vxfjy$+%eahBY(u57PhCk6a_-&smp!ti7!QS1mCi3^N6aju2 z^wra$S?ml-V1aZ(bjF=Q-GN^Gkm5*DnAospw?z_Ax5aCa4Kfw$?se}%rf(2Smc1;S zwGh<+@&aL)2LjBTXg^{b0UT4_cJrH4Txv}{4C$JcfEGbc#Y=A=bZ8do*5DD<#P6qwkSad33iKijiTgV4mM|qh8 zp}iu1lfz(<<4@(dJp>TzXSXn`Tn;4AH}}4rw~Qv8$d|?Cu>h}1k-bfr{;Jp9D=0>{ z&insR_SRuhckLGN%rGDz-61eaH-Zu>FsLXB3W{{6A{~N~Gaw~`NFyoTCEY1VgLHRy zH{Tw8-s^hLdB5*F=luOV^K-M~-uJ!kwbuULQAf)y=-8xByi(r=t-sHlHD6gPpadxq zQ@)cG5fX~cTv@wxpCq7q{?7Qd+PJpXB)g`kpr@XbMJp*aQf$UqGiu!&&t_yg&{8gD z3;yG$sjCduC*=seyCD6!iAjIh_?{*ky#^b44{YcX#XhB4J+cS}A|o}=Z*yJ5o2)*=?$?<#lj^jcSSq(p3-bSOD9cM2r#Hv{fU@YOBjbk` z1Dr@+DxaS?cJAR&y;wR!W~Egt5vmCMpJ^~3cRl6Pvvmp@dAn7G9OZPU8+r$el``S5 zrRLwer7kfk29F=;D&7Fqh-ItID`V|+r=C_GCG;GqG|QGKu78;Bug$v1=Gaj|EizYId241A_# zcDC_<7Ikiw{x;07nonuOew-wY0~3TkwF*u4-Ug+ZG$_SXVs32co|4HN)B94Ut3I>( zt=C_t?sM8n&10N3*VNx!DZp0$gOZ?rfQ1L%a;1*JuPCTIuWo<4nQ4+NBKkSaRY8MHwRM|Ca?!Q~!$v^c%@{xjq z&^A(DBGm{aNUIMy1K)kRiR#enOO17GEj#FAXbt0-HWw=Oy81<+6l>T_Ri$Vlk0U== zpJ*1OH5;04rLM#Ek4irK#+0%u%#+j>l(!jZfW3oWRT3sPk-Cd!Vo?kMxLpbJHafZpnDJdq}A$# znXXEH=Ky?6%np2kZy2Lbpv#*9AQi)p5Zk?o%;xB-+Xg@JC;OGuYIMOqtNn4eBZEP0u{_uA 
[GIT binary patch data for the PNG images under .github/images/ omitted]
z=a!$t<3Gg`4mB-hQc)BC1Gp^GSPE)c>GvLzlD*WJ~y_+chB{ofj}A;oTn#NnFSPx)24aX zR(nI=!a!d?m*z7L4*HP@!#8>RT?G6C`x99}f$d}D7Br`#9E8E(%<4)C_w+d1c!xj| z@u1m#-46!2BME;Vkz3fV{vr+QlACuG<~OTfn@^+vHJEQp)*%pG|0)SfX+5*E51=(y25Sx&!~aW;o^Sv; p>OUSP@xMO%UkCmFyN6Ajz?Uj2@tMRb!eHQ!y0WIyhZ`nO{|^otI!XWl literal 0 HcmV?d00001 diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml new file mode 100644 index 0000000..411b93f --- /dev/null +++ b/.github/release-drafter.yml @@ -0,0 +1,21 @@ +name-template: "v$RESOLVED_VERSION" +tag-template: "v$RESOLVED_VERSION" + +categories: + - title: ":rocket: Features" + labels: [enhancement, feature] + - title: ":wrench: Fixes" + labels: [bug, bugfix, fix] + - title: ":toolbox: Maintenance & Refactor" + labels: [refactor, refactoring, chore] + - title: ":package: Build System & CI/CD & Test" + labels: [build, ci, testing, test] + - title: ":pencil: Documentation" + labels: [documentation] + - title: ":arrow_up: Dependencies updates" + labels: [dependencies] + +template: | + ## Whatโ€™s Changed + + $CHANGES diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml new file mode 100644 index 0000000..0018f7f --- /dev/null +++ b/.github/workflows/linter.yml @@ -0,0 +1,32 @@ +name: linter + +on: [push] + +jobs: + check-lint: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python 3.7 + uses: actions/setup-python@v2 + with: + python-version: 3.7 + + - name: Cache pip + uses: actions/cache@v2 + with: + # This path is specific to Ubuntu + path: ~/.cache/pip + # Look to see if there is a cache hit for the corresponding requirements file + key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + ${{ runner.os }}- + - name: Install dependencies + run: | + python3 -m pip install --upgrade pip + - name: Check Lint (black, flake8, isort) + run: | + make quality diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml new file mode 100644 index 0000000..a052646 --- /dev/null +++ b/.github/workflows/release-drafter.yml @@ -0,0 +1,20 @@ +name: Release Drafter + +on: + push: + # branches to consider in the event; optional, defaults to all + branches: + - master + # pull_request event is required only for autolabeler + pull_request: + # Only following types are handled by the action, but one can default to all as well + types: [opened, reopened, synchronize] + +jobs: + update_release_draft: + runs-on: ubuntu-latest + steps: + # Drafts your next Release notes as Pull Requests are merged into "master" + - uses: release-drafter/release-drafter@v5.15.0 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..de30ccd --- /dev/null +++ b/.gitignore @@ -0,0 +1,135 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# Custom Ignore
+.vscode/
+.idea/
+.DS_Store
+test.ipynb
diff --git a/.gitmessage b/.gitmessage
new file mode 100644
index 0000000..eafd7d4
--- /dev/null
+++ b/.gitmessage
@@ -0,0 +1,16 @@
+# Title: Summary, imperative, don't end with a period
+# No more than 50 chars. #### 50 chars is here:  #
+
+# Remember blank line between title and body.
+
+# Body: Explain *what* and *why* (not *how*).
+# Wrap at 72 chars. ################################## which is here:  #
+
+# feat    : feature (a new feature)
+# fix     : bug (a bug fix)
+# refactor: refactoring
+# style   : style (code formatting, adding semicolons: no change to business logic)
+# docs    : documentation (adding, updating, or deleting docs)
+# dep     : library dependencies (adding libraries, version bumps)
+# test    : tests (adding, updating, or deleting test code: no change to business logic)
+# chore   : other changes (build script updates, etc.)
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..261eeb9
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
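The Makefile in the next hunk wires up the formatting and lint toolchain that the `linter` workflow above invokes through `make quality`. A typical local flow, as a sketch (assuming GNU make and `pip3` are on PATH):

```bash
make set-git        # use .gitmessage as the commit message template
make set-style-dep  # install the pinned isort / black / flake8 versions
make style          # auto-format the codebase in place
make quality        # check-only mode, the same command CI runs
```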
diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..3e6c413 --- /dev/null +++ b/Makefile @@ -0,0 +1,33 @@ +clean: clean-pyc clean-test +quality: set-style-dep check-quality +style: set-style-dep set-style + +##### basic ##### +set-git: + git config --local commit.template .gitmessage + +set-style-dep: + pip3 install isort==5.9.3 black==21.7b0 flake8==3.9.2 + +set-style: + black --config pyproject.toml . + isort --settings-path pyproject.toml . + flake8 . + +check-quality: + black --config pyproject.toml --check . + isort --settings-path pyproject.toml --check-only . + flake8 . + +##### clean ##### +clean-pyc: + find . -name '*.pyc' -exec rm -f {} + + find . -name '*.pyo' -exec rm -f {} + + find . -name '*~' -exec rm -f {} + + find . -name '__pycache__' -exec rm -fr {} + + +clean-test: + rm -f .coverage + rm -f .coverage.* + rm -rf .pytest_cache + rm -rf .mypy_cache diff --git a/README.md b/README.md new file mode 100644 index 0000000..e887d3e --- /dev/null +++ b/README.md @@ -0,0 +1,122 @@ +

+ + + +

Pretrained BigBird Model for Korean

+ +

+ What is BigBird • + How to Use • + Pretraining • + Evaluation Result • + Docs • + Citation +

+ +

+ 한국어 | + English +

+ +

+ + Apache 2.0 + + + Issues + + + linter + +

+ +
+
+## What is BigBird?
+
+
+
+[BigBird: Transformers for Longer Sequences](https://arxiv.org/abs/2007.14062) is a **sparse-attention**-based model that can handle **longer sequences** than a standard BERT.
+
+🦅 **Longer Sequence** - Handles **up to 4096 tokens**, 8 times the 512-token limit of BERT
+
+⏱️ **Computational Efficiency** - Uses **Sparse Attention** instead of full attention, improving complexity from O(n²) to O(n)
+
+## How to Use
+
+- The model uploaded to the 🤗 [Huggingface Hub](https://huggingface.co/monologg/kobigbird-bert-base) can be used right away :)
+- We recommend `transformers>=4.11.0`, in which some issues are fixed ([PR related to the MRC issue](https://github.com/huggingface/transformers/pull/13143))
+- **You must use `BertTokenizer` instead of BigBirdTokenizer (`BertTokenizer` is loaded when you use `AutoTokenizer`).**
+- For detailed usage, see the [BigBird Transformers documentation](https://huggingface.co/transformers/model_doc/bigbird.html).
+
+```python
+from transformers import AutoModel, AutoTokenizer
+
+model = AutoModel.from_pretrained("monologg/kobigbird-bert-base")  # BigBirdModel
+tokenizer = AutoTokenizer.from_pretrained("monologg/kobigbird-bert-base")  # BertTokenizer
+```
+
+## Pretraining
+
+> For more information, see [[Pretraining BigBird]](pretrain/README.md)
+
+| | Hardware | Max len | LR | Batch | Train Step | Warmup Step |
+| :---------------------- | -------: | ------: | ---: | ----: | ---------: | ----------: |
+| **KoBigBird-BERT-Base** | TPU v3-8 | 4096 | 1e-4 | 32 | 2M | 20k |
+
+- Trained with various data such as Everyone's Corpus, Korean Wiki, Common Crawl, and news data
+- Trained as an `ITC (Internal Transformer Construction)` model ([ITC vs ETC](https://huggingface.co/blog/big-bird#itc-vs-etc))
+
+## Evaluation Result
+
+### 1. Short Sequence (<=512)
+
+> For more information, see [[Finetune on Short Sequence Dataset]](docs/short_seq_evaluation_ko.md)
+
+| | NSMC<br>
(acc) | KLUE-NLI
(acc) | KLUE-STS
(pearsonr) | Korquad 1.0
(em/f1) | KLUE MRC
(em/rouge-w) |
+| :---------------------- | :-----------: | :---------------: | :--------------------: | :--------------------: | :----------------------: |
+| KoELECTRA-Base-v3 | 91.13 | 86.87 | **93.14** | 85.66 / 93.94 | 59.54 / 65.64 |
+| KLUE-RoBERTa-Base | 91.16 | 86.30 | 92.91 | 85.35 / 94.53 | 69.56 / 74.64 |
+| **KoBigBird-BERT-Base** | **91.18** | **87.17** | 92.61 | **87.08 / 94.71** | **70.33 / 75.34** |
+
+### 2. Long Sequence (>=1024)
+
+> For more information, see [[Finetune on Long Sequence Dataset]](finetune/README.md)
+
+| | TyDi QA<br>
(em/f1) | Korquad 2.1
(em/f1) | Fake News
(f1) | Modu Sentiment
(f1-macro) |
+| :---------------------- | :-----------------: | :---------------------: | :----------------: | :---------------------------: |
+| KLUE-RoBERTa-Base | 76.80 / 78.58 | 55.44 / 73.02 | 95.20 | 42.61 |
+| **KoBigBird-BERT-Base** | **79.13 / 81.30** | **67.77 / 82.03** | **98.85** | **45.42** |
+
+## Docs
+
+- [Pretraining BigBird](pretrain/README.md)
+- [Finetune on Short Sequence Dataset](docs/short_seq_evaluation_ko.md)
+- [Finetune on Long Sequence Dataset](finetune/README.md)
+- [Download Tensorflow v1 checkpoint](docs/download_tfv1_ckpt.md)
+- [GPU Benchmark result](docs/gpu_benchmark.md)
+
+## Citation
+
+If you use KoBigBird, please cite it as follows.
+
+```bibtex
+@misc{park2021kobigbird,
+  author = {Jangwon Park and Donggyu Kim},
+  title = {KoBigBird: Pretrained BigBird Model for Korean},
+  year = {2021},
+  publisher = {GitHub},
+  journal = {GitHub repository},
+  howpublished = {\url{https://github.com/monologg/KoBigBird}}
+}
+```
+
+## Contributors
+
+[Jangwon Park](https://github.com/monologg) and [Donggyu Kim](https://github.com/donggyukimc)
+
+## Acknowledgements
+
+KoBigBird was built with Cloud TPU support from the Tensorflow Research Cloud (TFRC) program.
+
+Also, thanks to [Seyun Ahn](https://www.instagram.com/ahnsy13) for the wonderful logo.
diff --git a/README_EN.md b/README_EN.md
new file mode 100644
index 0000000..9d0e962
--- /dev/null
+++ b/README_EN.md
@@ -0,0 +1,122 @@
+<div>
+ + + +

Pretrained BigBird Model for Korean

+ +

+ What is BigBird • + How to Use • + Pretraining • + Evaluation Result • + Docs • + Citation +

+ +

+ 한국어 | + English +

+ +

+ + Apache 2.0 + + + Issues + + + linter + +

+ +
+
+## What is BigBird?
+
+
+
+[BigBird: Transformers for Longer Sequences](https://arxiv.org/abs/2007.14062) is a **sparse-attention** based model that can handle **longer sequences** than a standard BERT.
+
+🦅 **Longer Sequence** - Handles **up to 4096 tokens**, 8 times the 512-token limit of BERT
+
+⏱️ **Computational Efficiency** - Improved from O(n²) to O(n) by using **Sparse Attention** instead of Full Attention
+
+## How to Use
+
+- Available on the 🤗 [Huggingface Hub](https://huggingface.co/monologg/kobigbird-bert-base)!
+- We recommend `transformers>=4.11.0`, in which some issues are fixed ([PR related to the MRC issue](https://github.com/huggingface/transformers/pull/13143))
+- **You have to use `BertTokenizer` instead of BigBirdTokenizer (`BertTokenizer` will be loaded if you use `AutoTokenizer`)**
+- For detailed guidelines, see the [BigBird Transformers documentation](https://huggingface.co/transformers/model_doc/bigbird.html).
+
+```python
+from transformers import AutoModel, AutoTokenizer
+
+model = AutoModel.from_pretrained("monologg/kobigbird-bert-base")  # BigBirdModel
+tokenizer = AutoTokenizer.from_pretrained("monologg/kobigbird-bert-base")  # BertTokenizer
+```
+
+## Pretraining
+
+> For more information, see [[Pretraining BigBird]](pretrain/README_EN.md)
+
+| | Hardware | Max len | LR | Batch | Train Step | Warmup Step |
+| :---------------------- | -------: | ------: | ---: | ----: | ---------: | ----------: |
+| **KoBigBird-BERT-Base** | TPU v3-8 | 4096 | 1e-4 | 32 | 2M | 20k |
+
+- Trained with various data such as Everyone's Corpus, Korean Wiki, Common Crawl, and news data
+- Use `ITC (Internal Transformer Construction)` model for pretraining ([ITC vs ETC](https://huggingface.co/blog/big-bird#itc-vs-etc))
+
+## Evaluation Result
+
+### 1. Short Sequence (<=512)
+
+> For more information, see [[Finetune on Short Sequence Dataset]](docs/short_seq_evaluation_en.md)
+
+| | NSMC<br>
(acc) | KLUE-NLI
(acc) | KLUE-STS
(pearsonr) | Korquad 1.0
(em/f1) | KLUE MRC
(em/rouge-w) | +| :---------------------- | :-----------: | :---------------: | :--------------------: | :--------------------: | :----------------------: | +| KoELECTRA-Base-v3 | 91.13 | 86.87 | **93.14** | 85.66 / 93.94 | 59.54 / 65.64 | +| KLUE-RoBERTa-Base | 91.16 | 86.30 | 92.91 | 85.35 / 94.53 | 69.56 / 74.64 | +| **KoBigBird-BERT-Base** | **91.18** | **87.17** | 92.61 | **87.08 / 94.71** | **70.33 / 75.34** | + +### 2. Long Sequence (>=1024) + +> For more information, see [[Finetune on Long Sequence Dataset]](finetune/README_EN.md) + +| | TyDi QA
(em/f1) | Korquad 2.1
(em/f1) | Fake News
(f1) | Modu Sentiment
(f1-macro) |
+| :---------------------- | :-----------------: | :---------------------: | :----------------: | :---------------------------: |
+| KLUE-RoBERTa-Base | 76.80 / 78.58 | 55.44 / 73.02 | 95.20 | 42.61 |
+| **KoBigBird-BERT-Base** | **79.13 / 81.30** | **67.77 / 82.03** | **98.85** | **45.42** |
+
+## Docs
+
+- [Pretraining BigBird](pretrain/README_EN.md)
+- [Finetune on Short Sequence Dataset](docs/short_seq_evaluation_en.md)
+- [Finetune on Long Sequence Dataset](finetune/README_EN.md)
+- [Download Tensorflow v1 checkpoint](docs/download_tfv1_ckpt.md)
+- [GPU Benchmark result](docs/gpu_benchmark.md)
+
+## Citation
+
+If you use KoBigBird in any project or research, please cite our work as below.
+
+```bibtex
+@misc{park2021kobigbird,
+  author = {Jangwon Park and Donggyu Kim},
+  title = {KoBigBird: Pretrained BigBird Model for Korean},
+  year = {2021},
+  publisher = {GitHub},
+  journal = {GitHub repository},
+  howpublished = {\url{https://github.com/monologg/KoBigBird}}
+}
+```
+
+## Contributors
+
+[Jangwon Park](https://github.com/monologg) and [Donggyu Kim](https://github.com/donggyukimc)
+
+## Acknowledgements
+
+KoBigBird is built with Cloud TPU support from the Tensorflow Research Cloud (TFRC) program.
+
+Also, thanks to [Seyun Ahn](https://www.instagram.com/ahnsy13) for the nice logo :)
diff --git a/docs/download_tfv1_ckpt.md b/docs/download_tfv1_ckpt.md
new file mode 100644
index 0000000..0ebe5e0
--- /dev/null
+++ b/docs/download_tfv1_ckpt.md
@@ -0,0 +1,9 @@
+# Download Tensorflow v1 checkpoint
+
+We've uploaded the TensorFlow v1 checkpoint to the [Huggingface Hub](https://huggingface.co/monologg/kobigbird-bert-base/tree/tfv1).
+
+Please download the checkpoint with the command below:
+
+```bash
+wget https://huggingface.co/monologg/kobigbird-bert-base/resolve/tfv1/kobigbird-bert-base-tf1.tar.gz
+```
diff --git a/docs/gpu_benchmark.md b/docs/gpu_benchmark.md
new file mode 100644
index 0000000..3fd886f
--- /dev/null
+++ b/docs/gpu_benchmark.md
@@ -0,0 +1,32 @@
+# GPU Benchmark result
+
+`GPU Memory` and `Inference Time` measured as a function of **sequence length**
+
+## Environment Info
+
+- transformers_version: 4.11.3
+- framework: PyTorch
+- use_torchscript: False
+- framework_version: 1.7.1
+- python_version: 3.6.13
+- system: Linux
+- cpu: x86_64
+- architecture: 64bit
+- fp16: False
+- use_multiprocessing: True
+- only_pretrain_model: False
+- cpu_ram_mb: 15717
+- use_gpu: True
+- num_gpus: 1
+- gpu: Tesla T4
+- gpu_ram_mb: 15109
+- gpu_power_watts: 70.0
+- gpu_performance_state: 0
+- use_tpu: False
+
+## Benchmark result
+
+<div>
+[figures: GPU memory usage and inference time by sequence length]
+</div>
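+
+## Reproducing (sketch)
+
+Numbers like the ones above can be produced with the benchmark utilities bundled with `transformers`. The following is a minimal sketch, assuming `transformers==4.11.3` as listed in the environment info (this benchmark API was deprecated in later releases):
+
+```python
+from transformers import PyTorchBenchmark, PyTorchBenchmarkArguments
+
+# Measure inference speed and GPU memory for several sequence lengths.
+# Note: BigBird falls back to full attention unless the sequence is longer than
+# (2 + 3 + 3 + 3) * block_size = 704 tokens (block_size=64, num_random_blocks=3),
+# so only the longer settings actually exercise block_sparse attention.
+args = PyTorchBenchmarkArguments(
+    models=["monologg/kobigbird-bert-base"],
+    batch_sizes=[1],
+    sequence_lengths=[512, 1024, 2048, 4096],
+)
+benchmark = PyTorchBenchmark(args)
+results = benchmark.run()  # prints time and memory tables per model and length
+```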
diff --git a/docs/short_seq_evaluation_en.md b/docs/short_seq_evaluation_en.md new file mode 100644 index 0000000..8970009 --- /dev/null +++ b/docs/short_seq_evaluation_en.md @@ -0,0 +1,50 @@ +# Finetune on Short Sequence Dataset + +

+ 한국어 | + English +

+ +## Details + +- KoBigBird performance evaluation in `max_seq_length<=512` setting + +- Evaluated with a total of **5 Datasets** + + - Single Sentence Classification: `NSMC` + - Sentence Pair Classification: `KLUE-NLI`, `KLUE-STS` + - Question Answering: `Korquad 1.0`, `KLUE-MRC` + +- **Based on the [KLUE-Baseline](https://github.com/KLUE-benchmark/KLUE-baseline) code with some modifications** + + - Add `nsmc` and `korquad 1.0` tasks + - Fix to be compatible with `transformers==4.11.3` + +- Sequence Classification is trained with a length of **128** and Question Answering with a length of **512** + + - **Full Attention** instead of Sparse Attention (Automatically changed to Full Attention with the following log) + + ```text + Attention type 'block_sparse' is not possible if sequence_length: 300 <= num global tokens: 2 * config.block_size + min. num sliding tokens: 3 * config.block_size + + config.num_random_blocks * config.block_size + additional buffer: config.num_random_blocks * config.block_size = 704 with config.block_size = 64, config.num_random_blocks = 3. + Changing attention type to 'original_full'... + ``` + +## Result + +| | NSMC
(acc) | KLUE-NLI
(acc) | KLUE-STS
(pearsonr) | Korquad 1.0
(em/f1) | KLUE MRC
(em/rouge-w) |
+| :---------------------- | :-----------: | :---------------: | :--------------------: | :--------------------: | :----------------------: |
+| KoELECTRA-Base-v3 | 91.13 | 86.87 | **93.14** | 85.66 / 93.94 | 59.54 / 65.64 |
+| KLUE-RoBERTa-Base | 91.16 | 86.30 | 92.91 | 85.35 / 94.53 | 69.56 / 74.64 |
+| **KoBigBird-BERT-Base** | **91.18** | **87.17** | 92.61 | **87.08 / 94.71** | **70.33 / 75.34** |
+
+- `KLUE` and `Korquad 1.0` are evaluated on the **dev set**.
+- For `KoELECTRA-Base-v3` and `KLUE-RoBERTa-Base`, we took the KLUE dataset scores from `A. Dev Set Results` in the [KLUE Paper](https://arxiv.org/abs/2105.09680).
+
+## Reference
+
+- [NSMC](https://github.com/e9t/nsmc)
+- [KLUE](https://github.com/KLUE-benchmark/KLUE)
+- [Korquad 1.0](https://korquad.github.io/KorQuad%201.0/)
+- [KoELECTRA-Base-v3](https://huggingface.co/monologg/koelectra-base-v3-discriminator)
+- [KLUE-RoBERTa-Base](https://huggingface.co/klue/roberta-base)
diff --git a/docs/short_seq_evaluation_ko.md b/docs/short_seq_evaluation_ko.md
new file mode 100644
index 0000000..d9f0459
--- /dev/null
+++ b/docs/short_seq_evaluation_ko.md
@@ -0,0 +1,50 @@
+# Finetune on Short Sequence Dataset
+
+<div>

+ 한국어 | + English +

+
+## Details
+
+- KoBigBird performance evaluation in the `max_seq_length<=512` setting
+
+- Evaluated on a total of **5 datasets**
+
+  - Single Sentence Classification: `NSMC`
+  - Sentence Pair Classification: `KLUE-NLI`, `KLUE-STS`
+  - Question Answering: `Korquad 1.0`, `KLUE-MRC`
+
+- **Based on the [KLUE-Baseline](https://github.com/KLUE-benchmark/KLUE-baseline) code with some modifications**
+
+  - Added the `nsmc` and `korquad 1.0` tasks
+  - Fixed to be compatible with `transformers==4.11.3`
+
+- Sequence Classification is trained with a length of **128** and Question Answering with a length of **512**
+
+  - **Full Attention** instead of Sparse Attention (automatically switched to Full Attention with the following log)
+
+    ```text
+    Attention type 'block_sparse' is not possible if sequence_length: 300 <= num global tokens: 2 * config.block_size
+    + min. num sliding tokens: 3 * config.block_size
+    + config.num_random_blocks * config.block_size
+    + additional buffer: config.num_random_blocks * config.block_size = 704 with config.block_size = 64, config.num_random_blocks = 3.
+    Changing attention type to 'original_full'...
+    ```
+
+## Result
+
+| | NSMC<br>
(acc) | KLUE-NLI
(acc) | KLUE-STS
(pearsonr) | Korquad 1.0
(em/f1) | KLUE MRC
(em/rouge-w) |
+| :---------------------- | :-----------: | :---------------: | :--------------------: | :--------------------: | :----------------------: |
+| KoELECTRA-Base-v3 | 91.13 | 86.87 | **93.14** | 85.66 / 93.94 | 59.54 / 65.64 |
+| KLUE-RoBERTa-Base | 91.16 | 86.30 | 92.91 | 85.35 / 94.53 | 69.56 / 74.64 |
+| **KoBigBird-BERT-Base** | **91.18** | **87.17** | 92.61 | **87.08 / 94.71** | **70.33 / 75.34** |
+
+- `KLUE` and `Korquad 1.0` are both evaluated on the **dev set**
+- For `KoELECTRA-Base-v3` and `KLUE-RoBERTa-Base`, the KLUE dataset scores are taken from `A. Dev Set Results` in the [KLUE Paper](https://arxiv.org/abs/2105.09680)
+
+## Reference
+
+- [NSMC](https://github.com/e9t/nsmc)
+- [KLUE](https://github.com/KLUE-benchmark/KLUE)
+- [Korquad 1.0](https://korquad.github.io/KorQuad%201.0/)
+- [KoELECTRA-Base-v3](https://huggingface.co/monologg/koelectra-base-v3-discriminator)
+- [KLUE-RoBERTa-Base](https://huggingface.co/klue/roberta-base)
diff --git a/finetune/.gitignore b/finetune/.gitignore
new file mode 100644
index 0000000..6b3c714
--- /dev/null
+++ b/finetune/.gitignore
@@ -0,0 +1,5 @@
+cache/
+ckpt/
+output/
+seq2seq-model/
+*.out
diff --git a/finetune/README.md b/finetune/README.md
new file mode 100644
index 0000000..d2b985d
--- /dev/null
+++ b/finetune/README.md
@@ -0,0 +1,78 @@
+# Finetune on Long Sequence Dataset
+
+<div>

+ 한국어 | + English +

+
+## About Dataset
+
+| Dataset | Task | Length (median) | Length (max) |
+| ------------------ | ----------------------- | --------------: | -----------: |
+| **TyDi QA** | Question Answering | 6,165 | 67,135 |
+| **Korquad 2.1** | Question Answering | 5,777 | 486,730 |
+| **Fake News** | Sequence Classification | 564 | 17,488 |
+| **Modu Sentiment** | Sequence Classification | 185 | 5,245 |
+
+- `Length` is calculated in subword tokens.
+- [TyDi QA](https://github.com/google-research-datasets/tydiqa) is originally a `multilingual` dataset and includes yes/no (`BoolQA`) answers. **In this project we only use the Korean samples and also exclude the yes/no answers.**
+
+## Setup
+
+### 1. Requirements
+
+```bash
+pip3 install -r requirements.txt
+```
+
+### 2. Prepare Dataset
+
+#### 1) Question Answering
+
+```bash
+bash download_qa_dataset.sh
+```
+
+#### 2) Sequence Classification
+
+- **After downloading the data from the links below, place it in the `--data_dir` path.**
+- `Fake news`: [Korean Fake news](https://github.com/2alive3s/Fake_news/blob/b43638105f4802de5773c21afe539157ebed6cc5/data/mission2_train.zip) (`mission2_train.csv`)
+- `Modu sentiment corpus`: [Sentiment Analysis Corpus 2020 (감성 분석 말뭉치 2020)](https://corpus.korean.go.kr) (`EXSA2002108040.json`)
+
+## How to Run
+
+- We recommend running on a **TPU instance** to train these large, long-sequence datasets.
+- All results were trained and evaluated with `TPU v3-8` on the [torch-xla-1.8.1](https://github.com/pytorch/xla#-consume-prebuilt-compute-vm-images) environment.
+- To train on GPU instead of TPU, remove the `--use_tpu` argument from the scripts.
+
+```bash
+bash scripts/run_{$TASK_NAME}.sh # kobigbird
+bash scripts/run_{$TASK_NAME}_short.sh # klue roberta
+```
+
+```bash
+bash scripts/run_tydiqa.sh # tydiqa
+bash scripts/run_korquad_2.sh # korquad 2.1
+bash scripts/run_fake_news.sh # fake news
+bash scripts/run_modu_sentiment.sh # modu sentiment
+```
+
+## Results
+
+- For Sequence Classification, we split `train:test=8:2` for evaluation.
+- For the Korquad 2.1 dataset, due to limited computational resources, **the model was trained on only a subset of the training data**.
+  - Add the `--all_korquad_2_sample` argument to the script to train on the full dataset
+- `KoBigBird` was trained with a length of **4096** for Question Answering and **1024** for Sequence Classification.
+- `KLUE RoBERTa` was trained with a length of **512**.
+
+| | TyDi QA<br>
(em/f1) | Korquad 2.1
(em/f1) | Fake News
(f1) | Modu Sentiment
(f1-macro) |
+| :---------------------- | :-----------------: | :---------------------: | :----------------: | :---------------------------: |
+| KLUE-RoBERTa-Base | 76.80 / 78.58 | 55.44 / 73.02 | 95.20 | 42.61 |
+| **KoBigBird-BERT-Base** | **79.13 / 81.30** | **67.77 / 82.03** | **98.85** | **45.42** |
+
+## Reference
+
+- [TyDi QA](https://github.com/google-research-datasets/tydiqa)
+- [Korquad](https://korquad.github.io/)
+- [Korean Fake news](https://github.com/2alive3s/Fake_news)
+- [Modu Corpus (모두의 말뭉치)](https://corpus.korean.go.kr/)
diff --git a/finetune/README_EN.md b/finetune/README_EN.md
new file mode 100644
index 0000000..b7ddaf8
--- /dev/null
+++ b/finetune/README_EN.md
@@ -0,0 +1,78 @@
+# Finetune on Long Sequence Dataset
+
+<div>

+ 한국어 | + English +

+
+## About Dataset
+
+| Dataset | Task | Length (median) | Length (max) |
+| ------------------ | ----------------------- | --------------: | -----------: |
+| **TyDi QA** | Question Answering | 6,165 | 67,135 |
+| **Korquad 2.1** | Question Answering | 5,777 | 486,730 |
+| **Fake News** | Sequence Classification | 564 | 17,488 |
+| **Modu Sentiment** | Sequence Classification | 185 | 5,245 |
+
+- `Length` is calculated in subword tokens.
+- [TyDi QA](https://github.com/google-research-datasets/tydiqa) is originally `multilingual` and contains `BoolQA` cases. **We only use the Korean samples and skip the BoolQA samples.**
+
+## Setup
+
+### 1. Requirements
+
+```bash
+pip3 install -r requirements.txt
+```
+
+### 2. Prepare Dataset
+
+#### 1) Question Answering
+
+```bash
+bash download_qa_dataset.sh
+```
+
+#### 2) Sequence Classification
+
+- **After downloading the data through the links below, place the data in the `--data_dir` path.**
+- `Fake news`: [Korean Fake news](https://github.com/2alive3s/Fake_news/blob/b43638105f4802de5773c21afe539157ebed6cc5/data/mission2_train.zip) (`mission2_train.csv`)
+- `Modu sentiment corpus`: [Sentiment Analysis Corpus 2020 (감성 분석 말뭉치 2020)](https://corpus.korean.go.kr) (`EXSA2002108040.json`)
+
+## How to Run
+
+- We highly recommend running the scripts on a **TPU instance** in order to train and evaluate these large, long-sequence datasets.
+- We trained and evaluated the models on the [torch-xla-1.8.1](https://github.com/pytorch/xla#-consume-prebuilt-compute-vm-images) environment with `TPU v3-8`.
+- Remove the `--use_tpu` argument for GPU training.
+
+```bash
+bash scripts/run_{$TASK_NAME}.sh # kobigbird
+bash scripts/run_{$TASK_NAME}_short.sh # klue roberta
+```
+
+```bash
+bash scripts/run_tydiqa.sh # tydiqa
+bash scripts/run_korquad_2.sh # korquad 2.1
+bash scripts/run_fake_news.sh # fake news
+bash scripts/run_modu_sentiment.sh # modu sentiment
+```
+
+## Results
+
+- For sequence classification, we split `train:test=8:2` for evaluation.
+- For `korquad 2.1`, we **only use a subset of the train dataset** because of limited computational resources.
+  - Add the `--all_korquad_2_sample` argument in order to use the full train dataset.
+- For `KoBigBird`, question answering was trained with a length of **4096** and sequence classification with a length of **1024**.
+- `KLUE RoBERTa` was trained with a length of **512**.
+
+| | TyDi QA<br>
(em/f1) | Korquad 2.1
(em/f1) | Fake News
(f1) | Modu Sentiment
(f1-macro) |
+| :---------------------- | :-----------------: | :---------------------: | :----------------: | :---------------------------: |
+| KLUE-RoBERTa-Base | 76.80 / 78.58 | 55.44 / 73.02 | 95.20 | 42.61 |
+| **KoBigBird-BERT-Base** | **79.13 / 81.30** | **67.77 / 82.03** | **98.85** | **45.42** |
+
+## Reference
+
+- [TyDi QA](https://github.com/google-research-datasets/tydiqa)
+- [Korquad](https://korquad.github.io/)
+- [Korean Fake news](https://github.com/2alive3s/Fake_news)
+- [Modu Corpus (모두의 말뭉치)](https://corpus.korean.go.kr/)
diff --git a/finetune/data/__init__.py b/finetune/data/__init__.py
new file mode 100644
index 0000000..dc52b1c
--- /dev/null
+++ b/finetune/data/__init__.py
@@ -0,0 +1 @@
+from data.common import get_data  # noqa
diff --git a/finetune/data/cls.py b/finetune/data/cls.py
new file mode 100644
index 0000000..30c7c97
--- /dev/null
+++ b/finetune/data/cls.py
@@ -0,0 +1,147 @@
+import json
+
+import numpy as np
+import pandas as pd
+import torch
+from datasets import load_dataset
+from sklearn.model_selection import train_test_split
+
+
+def sample_writer(data, config, tokenizer, is_train):
+    feature = tokenizer(
+        data["text"],
+        max_length=config.max_seq_length,
+        padding="max_length",
+        truncation=True,
+        add_special_tokens=True,
+    )
+    write_data = {
+        "input_ids": feature["input_ids"],
+        "attention_mask": feature["attention_mask"],
+        "labels": data["label"],
+    }
+    return write_data
+
+
+def make_label_map(labels):
+    unique_labels = sorted(set(labels))
+    label2id = dict()
+    for i, label in enumerate(unique_labels):
+        label2id[label] = i
+    return label2id
+
+
+def postprocess():
+    def decorator(fn):
+        def wrapped(config, data_file, is_train, **kwargs):
+            get_label_map = kwargs.get("get_label_map", False)
+            texts, labels = fn(config, data_file, is_train)
+
+            try:
+                label2id = config.label2id
+            except Exception:
+                label2id = make_label_map(labels)
+
+            labels = [label2id[label] for label in labels]
+
+            if get_label_map:
+                return label2id
+
+            data = [{"text": text, "label": label} for text, label in zip(texts, labels)]
+            pd.DataFrame(data).to_csv(
+                "{}_{}_{}.csv".format(data_file, config.dataset, "train" if is_train else "valid"),
+                index=False,
+                encoding="utf-8-sig",
+            )
+            if is_train:
+                pd.DataFrame(list(label2id.items()), columns=["label", "id"]).to_csv(
+                    "{}_{}_label2id.csv".format(data_file, config.dataset), index=False, encoding="utf-8-sig"
+                )
+
+            return data
+
+        return wrapped
+
+    return decorator
+
+
+def train_split(config, texts, labels, is_train):
+    # train_test_split returns (texts_train, texts_test, labels_train, labels_test)
+    texts_train, texts_test, labels_train, labels_test = train_test_split(
+        texts, labels, test_size=0.2, random_state=config.seed, stratify=labels
+    )
+    if is_train:
+        texts, labels = texts_train, labels_train
+    else:
+        texts, labels = texts_test, labels_test
+    return texts, labels
+
+
+@postprocess()
+def process_fake_news_cls(config, data_file, is_train):
+    df = pd.read_csv(data_file)
+    try:
+        labels = df["Label"].astype(str).values.tolist()
+    except Exception:
+        labels = df["label"].astype(str).values.tolist()
+    texts = [
+        title + " " + content
+        for title, content in zip(df["title"].astype(str).values.tolist(), df["content"].astype(str).values.tolist())
+    ]
+    texts, labels = train_split(config, texts, labels, is_train)
+    return texts, labels
+
+
+@postprocess()
+def process_aihub_sentiment(config, data_file, is_train):
+    with open(data_file) as handle:
+        data = json.load(handle)
+    texts = [" ".join([str(v) for _, v in datum["talk"]["content"].items()]) for datum in data]
+    labels = 
[datum["profile"]["emotion"]["type"] for datum in data] + return texts, labels + + +@postprocess() +def process_modu_sentiment(config, data_file, is_train): + with open(data_file) as handle: + data = json.load(handle)["document"] + texts, labels = [], [] + for datum in data: + texts.append(" ".join(paragraph["paragraph_form"] for paragraph in datum["paragraph"])) + labels.append(datum["document_score"]) + texts, labels = train_split(config, texts, labels, is_train) + return texts, labels + + +@postprocess() +def process_nsmc(config, data_file, is_train): + data = load_dataset("nsmc", cache_dir=config.cache_dir) + data = data[("train" if is_train else "test")] + texts, labels = [], [] + for datum in data: + labels.append(datum.pop("label")) + texts.append(datum.pop("document")) + return texts, labels + + +process_map = { + "fake_news": process_fake_news_cls, + "aihub_sentiment": process_aihub_sentiment, + "modu_sentiment": process_modu_sentiment, + "nsmc": process_nsmc, +} + + +def collate_fn(features): + input_ids = [sample["input_ids"] for sample in features] + attention_mask = [sample["attention_mask"] for sample in features] + labels = [sample["labels"] for sample in features] + + input_ids = torch.tensor(np.array(input_ids).astype(np.int64), dtype=torch.long) + attention_mask = torch.tensor(np.array(attention_mask).astype(np.int8), dtype=torch.long) + labels = torch.tensor(np.array(labels).astype(np.int64), dtype=torch.long) + inputs = { + "input_ids": input_ids, + "attention_mask": attention_mask, + "labels": labels, + } + return inputs diff --git a/finetune/data/common.py b/finetune/data/common.py new file mode 100644 index 0000000..d19d203 --- /dev/null +++ b/finetune/data/common.py @@ -0,0 +1,188 @@ +import json +import logging +import multiprocessing +import os + +import torch +import torch.utils.data as torch_data +from tqdm import tqdm + +logging.basicConfig( + format="%(asctime)s - %(levelname)s - %(name)s - %(message)s", + datefmt="%m/%d/%Y %H:%M:%S", + level=logging.INFO, +) + +logger = logging.getLogger(__name__) + +from data import cls as cls_data +from data import qa as qa_data +from datasets import load_dataset + +DATA_PROCESSOR = {"cls": cls_data, "qa": qa_data} + + +def get_data(config, tokenizer, is_train=True, overwrite=False): + if is_train: + data_file = config.train_file + else: + data_file = config.predict_file + + data_path = config.data_dir + if data_file is not None: + data_path = os.path.join(data_path, data_file) + else: + data_path += "/" + + data_processor = DATA_PROCESSOR.get(config.task, None) + if data_processor is None: + raise Exception(f"Invalid data task {config.task}!") + + processor = data_processor.process_map.get(config.dataset, None) + if processor is None: + raise Exception(f"Invalid task dataset {config.dataset}!") + + comps = [ + data_path, + config.dataset, + config.model_name_or_path.replace("/", "_"), + config.max_seq_length, + "train" if is_train else "dev", + "dataset.txt", + ] + dataset_file = "_".join([str(comp) for comp in comps]) + + if not os.path.exists(dataset_file) or overwrite: + with open(dataset_file, "w", encoding="utf-8") as writer_file: + if data_file is None or not os.path.isdir(data_path): + data = processor(config, data_path, is_train) + cnt = write_samples( + config, tokenizer, is_train, data_processor, writer_file, data, workers=config.threads + ) + else: + cnt = 0 + for filename in sorted([f for f in os.listdir(data_path) if f.endswith(".json")]): + data = processor(config, os.path.join(data_path, filename), is_train) 
+ cnt += write_samples( + config, tokenizer, is_train, data_processor, writer_file, data, workers=config.threads + ) + logger.info(f"{cnt} features processed from {data_path}") + + dataset = load_dataset("text", data_files=dataset_file)["train"] + dataset = dataset.map(lambda x: json.loads(x["text"]), batched=False) + + if not is_train: + # for valid datasets, we pad datasets so that no sample will be skipped in multi-device settings + dataset = IterableDatasetPad( + dataset=dataset, + batch_size=config.train_batch_size if is_train else config.eval_batch_size, + num_devices=config.world_size, + seed=config.seed, + ) + + dataloader = torch_data.DataLoader( + dataset, + sampler=torch_data.RandomSampler(dataset) if is_train else None, + drop_last=False, + batch_size=config.train_batch_size if is_train else config.eval_batch_size, + collate_fn=(data_processor.collate_fn), + ) + + return dataloader + + +config = None +tokenizer = None +is_train = None +writer = None + + +def init_sample_writer(_config, _tokenizer, _is_train, _writer): + global config + global tokenizer + global is_train + global writer + config = _config + tokenizer = _tokenizer + is_train = _is_train + writer = _writer + + +def sample_writer(data): + global config + global tokenizer + global is_train + global writer + return writer(data, config, tokenizer, is_train) + + +def write_samples(config, tokenizer, is_train, processor, writer_file, data, workers=4): + write_cnt = 0 + with multiprocessing.Pool( + processes=workers, + initializer=init_sample_writer, + initargs=(config, tokenizer, is_train, processor.sample_writer), + ) as pool: + for write_data in tqdm( + pool.imap(sample_writer, data), total=len(data), dynamic_ncols=True, desc="writing samples..." + ): + if isinstance(write_data, list): + for datum in write_data: + writer_file.write(json.dumps(datum) + "\n") + write_cnt += len(write_data) + else: + writer_file.write(json.dumps(write_data) + "\n") + write_cnt += 1 + return write_cnt + + +class IterableDatasetPad(torch_data.IterableDataset): + def __init__( + self, + dataset: torch_data.IterableDataset, + batch_size: int = 1, + num_devices: int = 1, + seed: int = 0, + ): + self.dataset = dataset + self.batch_size = batch_size + self.seed = seed + self.epoch = 0 # referenced when re-seeding the dataset generator in __iter__ + self.num_examples = 0 + + chunk_size = self.batch_size * num_devices + length = len(dataset) + self.length = length + (chunk_size - length % chunk_size) + + def __len__(self): + return self.length + + def __iter__(self): + self.num_examples = 0 + if ( + not hasattr(self.dataset, "set_epoch") + and hasattr(self.dataset, "generator") + and isinstance(self.dataset.generator, torch.Generator) + ): + self.dataset.generator.manual_seed(self.seed + self.epoch) + + first_batch = None + current_batch = [] + for element in self.dataset: + self.num_examples += 1 + current_batch.append(element) + # Wait to have a full batch before yielding elements. + if len(current_batch) == self.batch_size: + for batch in current_batch: + yield batch + if first_batch is None: + first_batch = batch.copy() + current_batch = []
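# Illustrative sketch of the length arithmetic used by IterableDatasetPad above
# (a standalone example with made-up sizes, not part of the patch): the dataset
# length is rounded up to a multiple of batch_size * num_devices so that no
# evaluation sample is dropped mid-batch. Note that an exact multiple still
# gains one extra chunk, mirroring the expression in __init__.
def padded_length(length, batch_size, num_devices):
    chunk_size = batch_size * num_devices
    return length + (chunk_size - length % chunk_size)

assert padded_length(10, batch_size=4, num_devices=2) == 16
assert padded_length(16, batch_size=4, num_devices=2) == 24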
+ # pad the last batch with elements from the beginning. + while self.num_examples < self.length: + add_num = self.batch_size - len(current_batch) + self.num_examples += add_num + current_batch += [first_batch] * add_num + for batch in current_batch: + yield batch + current_batch = [] diff --git a/finetune/data/qa.py b/finetune/data/qa.py new file mode 100644 index 0000000..fcb352b --- /dev/null +++ b/finetune/data/qa.py @@ -0,0 +1,581 @@ +import copy +import json +import re + +import numpy as np +import torch +import transformers.data.processors.squad as squad +from tqdm import tqdm + + +class SquadExample: + """ + A single training/test example for the Squad dataset, as loaded from disk. + + Args: + qas_id: The example's unique identifier + question_text: The question string + context_text: The context string + answer_text: The answer string + start_position_character: The character position of the start of the answer + title: The title of the example + answers: Empty list by default, this is used during evaluation. Holds answers as well as their start positions. + is_impossible: False by default, set to True if the example has no possible answer. + """ + + def __init__( + self, + qas_id, + question_text, + context_text, + answer_text, + start_position_character, + title, + answers=[], + is_impossible=False, + doc_tokens=None, + char_to_word_offset=None, + ): + self.qas_id = qas_id + self.question_text = question_text + self.context_text = context_text + self.answer_text = answer_text + self.title = title + self.is_impossible = is_impossible + self.answers = answers + + self.start_position, self.end_position = 0, 0 + + if doc_tokens is None: + doc_tokens = [] + char_to_word_offset = [] + prev_is_whitespace = True + + # Split on whitespace so that different tokens may be attributed to their original position. + for c in self.context_text: + if squad._is_whitespace(c): + prev_is_whitespace = True + else: + if prev_is_whitespace: + doc_tokens.append(c) + else: + doc_tokens[-1] += c + prev_is_whitespace = False + char_to_word_offset.append(len(doc_tokens) - 1) + + self.doc_tokens = doc_tokens + self.char_to_word_offset = char_to_word_offset + + # Start and end positions only have a value when a start character is provided and the example is answerable. + if start_position_character is not None and not is_impossible: + self.start_position = char_to_word_offset[start_position_character] + self.end_position = char_to_word_offset[ + min(start_position_character + len(answer_text) - 1, len(char_to_word_offset) - 1) + ] + + +def squad_convert_example_to_features( + example, + tokenizer, + max_seq_length, + doc_stride, + max_query_length, + padding_strategy, + is_training, + tok_to_orig_index=None, + orig_to_tok_index=None, + all_doc_tokens=None, +): + features = [] + if is_training and not example.is_impossible: + # Get start and end position + start_position = example.start_position + end_position = example.end_position
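# Illustrative sketch (standalone, with a made-up context; not part of the
# patch) of the char_to_word_offset mapping that SquadExample builds above:
# each character of the context is mapped to the index of the
# whitespace-delimited token containing it, so a character-level answer start
# can be converted into a word-level start position.
context = "barack obama was born"
doc_tokens, char_to_word_offset = [], []
prev_is_whitespace = True
for c in context:
    if c.isspace():
        prev_is_whitespace = True
    else:
        if prev_is_whitespace:
            doc_tokens.append(c)
        else:
            doc_tokens[-1] += c
        prev_is_whitespace = False
    char_to_word_offset.append(len(doc_tokens) - 1)
assert doc_tokens == ["barack", "obama", "was", "born"]
assert char_to_word_offset[7] == 1  # the "o" of "obama" lies in token 1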
+ + # If the answer cannot be found in the text, then skip this example. + actual_text = " ".join(example.doc_tokens[start_position : (end_position + 1)]) + cleaned_answer_text = " ".join(squad.whitespace_tokenize(example.answer_text)) + if actual_text.find(cleaned_answer_text) == -1: + return [], None, None, None + + if tok_to_orig_index is None: + tok_to_orig_index = [] + orig_to_tok_index = [] + all_doc_tokens = [] + for (i, token) in enumerate(example.doc_tokens): + orig_to_tok_index.append(len(all_doc_tokens)) + if tokenizer.__class__.__name__ in [ + "RobertaTokenizer", + "LongformerTokenizer", + "BartTokenizer", + "RobertaTokenizerFast", + "LongformerTokenizerFast", + "BartTokenizerFast", + ]: + sub_tokens = tokenizer.tokenize(token, add_prefix_space=True) + else: + sub_tokens = tokenizer.tokenize(token) + for sub_token in sub_tokens: + tok_to_orig_index.append(i) + all_doc_tokens.append(sub_token) + + if is_training and not example.is_impossible: + tok_start_position = orig_to_tok_index[example.start_position] + if example.end_position < len(example.doc_tokens) - 1: + tok_end_position = orig_to_tok_index[example.end_position + 1] - 1 + else: + tok_end_position = len(all_doc_tokens) - 1 + + (tok_start_position, tok_end_position) = squad._improve_answer_span( + all_doc_tokens, tok_start_position, tok_end_position, tokenizer, example.answer_text + ) + + spans = [] + + truncated_query = tokenizer.encode( + example.question_text, add_special_tokens=False, truncation=True, max_length=max_query_length + ) + + # Tokenizers who insert 2 SEP tokens in-between <context> & <question> need to have special handling + # in the way they compute mask of added tokens. + tokenizer_type = type(tokenizer).__name__.replace("Tokenizer", "").lower() + sequence_added_tokens = ( + tokenizer.model_max_length - tokenizer.max_len_single_sentence + 1 + if tokenizer_type in squad.MULTI_SEP_TOKENS_TOKENIZERS_SET + else tokenizer.model_max_length - tokenizer.max_len_single_sentence + ) + sequence_pair_added_tokens = tokenizer.model_max_length - tokenizer.max_len_sentences_pair + + span_doc_tokens = all_doc_tokens + while len(spans) * doc_stride < len(all_doc_tokens): + + # Define the side we want to truncate / pad and the text/pair sorting + if tokenizer.padding_side == "right": + texts = truncated_query + pairs = span_doc_tokens + truncation = squad.TruncationStrategy.ONLY_SECOND.value + else: + texts = span_doc_tokens + pairs = truncated_query + truncation = squad.TruncationStrategy.ONLY_FIRST.value + + encoded_dict = tokenizer.encode_plus( # TODO(thom) update this logic + texts, + pairs, + truncation=truncation, + padding=padding_strategy, + max_length=max_seq_length, + return_overflowing_tokens=True, + stride=max_seq_length - doc_stride - len(truncated_query) - sequence_pair_added_tokens, + return_token_type_ids=True, + ) + + paragraph_len = min( + len(all_doc_tokens) - len(spans) * doc_stride, + max_seq_length - len(truncated_query) - sequence_pair_added_tokens, + ) + + if tokenizer.pad_token_id in encoded_dict["input_ids"]: + if tokenizer.padding_side == "right": + non_padded_ids = encoded_dict["input_ids"][: encoded_dict["input_ids"].index(tokenizer.pad_token_id)] + else: + last_padding_id_position = ( + len(encoded_dict["input_ids"]) - 1 - encoded_dict["input_ids"][::-1].index(tokenizer.pad_token_id) + ) + non_padded_ids = encoded_dict["input_ids"][last_padding_id_position + 1 :] + + else: + non_padded_ids = encoded_dict["input_ids"] + + tokens = tokenizer.convert_ids_to_tokens(non_padded_ids) + + token_to_orig_map = {} + for i in range(paragraph_len): + index = len(truncated_query) +
sequence_added_tokens + i if tokenizer.padding_side == "right" else i + token_to_orig_map[index] = tok_to_orig_index[len(spans) * doc_stride + i] + + encoded_dict["paragraph_len"] = paragraph_len + encoded_dict["tokens"] = tokens + encoded_dict["token_to_orig_map"] = token_to_orig_map + encoded_dict["truncated_query_with_special_tokens_length"] = len(truncated_query) + sequence_added_tokens + encoded_dict["token_is_max_context"] = {} + encoded_dict["start"] = len(spans) * doc_stride + encoded_dict["length"] = paragraph_len + + spans.append(encoded_dict) + + if "overflowing_tokens" not in encoded_dict or ( + "overflowing_tokens" in encoded_dict and len(encoded_dict["overflowing_tokens"]) == 0 + ): + break + span_doc_tokens = encoded_dict["overflowing_tokens"] + + for doc_span_index in range(len(spans)): + for j in range(spans[doc_span_index]["paragraph_len"]): + is_max_context = squad._new_check_is_max_context(spans, doc_span_index, doc_span_index * doc_stride + j) + index = ( + j + if tokenizer.padding_side == "left" + else spans[doc_span_index]["truncated_query_with_special_tokens_length"] + j + ) + spans[doc_span_index]["token_is_max_context"][index] = is_max_context + + for span in spans: + # Identify the position of the CLS token + cls_index = span["input_ids"].index(tokenizer.cls_token_id) + + p_mask = np.array([]) + + # # p_mask: mask with 1 for token than cannot be in the answer (0 for token which can be in an answer) + # # Original TF implementation also keep the classification token (set to 0) + # p_mask = np.ones_like(span["token_type_ids"]) + # if tokenizer.padding_side == "right": + # p_mask[len(truncated_query) + sequence_added_tokens :] = 0 + # else: + # p_mask[-len(span["tokens"]) : -(len(truncated_query) + sequence_added_tokens)] = 0 + + # pad_token_indices = np.where(span["input_ids"] == tokenizer.pad_token_id) + # special_token_indices = np.asarray( + # tokenizer.get_special_tokens_mask(span["input_ids"], already_has_special_tokens=True) + # ).nonzero() + + # p_mask[pad_token_indices] = 1 + # p_mask[special_token_indices] = 1 + + # # Set the cls index to 0: the CLS index can be used for impossible answers + # p_mask[cls_index] = 0 + + span_is_impossible = example.is_impossible + start_position = 0 + end_position = 0 + if is_training and not span_is_impossible: + # For training, if our document chunk does not contain an annotation + # we throw it out, since there is nothing to predict. + doc_start = span["start"] + doc_end = span["start"] + span["length"] - 1 + out_of_span = False + + if not (tok_start_position >= doc_start and tok_end_position <= doc_end): + out_of_span = True + + if out_of_span: + start_position = cls_index + end_position = cls_index + span_is_impossible = True + else: + if tokenizer.padding_side == "left": + doc_offset = 0 + else: + doc_offset = len(truncated_query) + sequence_added_tokens + + start_position = tok_start_position - doc_start + doc_offset + end_position = tok_end_position - doc_start + doc_offset + + features.append( + squad.SquadFeatures( + span["input_ids"], + span["attention_mask"], + span["token_type_ids"], + cls_index, + p_mask.tolist(), + example_index=0, # Can not set unique_id and example_index here. They will be set after multiple processing. 
+ unique_id=0, + paragraph_len=span["paragraph_len"], + token_is_max_context=span["token_is_max_context"], + tokens=span["tokens"], + token_to_orig_map=span["token_to_orig_map"], + start_position=start_position, + end_position=end_position, + is_impossible=span_is_impossible, + qas_id=example.qas_id, + ) + ) + return features, tok_to_orig_index, orig_to_tok_index, all_doc_tokens + + +def sample_writer(data, config, tokenizer, is_train): + """process and write single 'paragraph-context' from squad-formed QA dataset""" + context = data["context"] + example = None + tok_to_orig_index = None + orig_to_tok_index = None + all_doc_tokens = None + write_data = [] + for qas in data["qas"]: + is_impossible = qas.get("is_impossible", False) + example = SquadExample( + qas_id=qas["id"], + question_text=qas["question"], + context_text=context, + answer_text="" if is_impossible else qas["answers"][0]["text"], + start_position_character=0 if is_impossible else qas["answers"][0]["answer_start"], + title="", + answers=qas["answers"], + is_impossible=is_impossible, + doc_tokens=(None if example is None else example.doc_tokens), + char_to_word_offset=(None if example is None else example.char_to_word_offset), + ) + features, tok_to_orig_index, orig_to_tok_index, all_doc_tokens = squad_convert_example_to_features( + example=example, + tokenizer=tokenizer, + max_seq_length=config.max_seq_length, + doc_stride=config.doc_stride, + max_query_length=config.max_query_length, + padding_strategy="max_length", + is_training=is_train, + tok_to_orig_index=tok_to_orig_index, + orig_to_tok_index=orig_to_tok_index, + all_doc_tokens=all_doc_tokens, + ) + for i, feature in enumerate(features): + write_datum = { + "input_ids": feature.input_ids, + "attention_mask": feature.attention_mask, + "token_type_ids": feature.token_type_ids, + "start_position": feature.start_position, + "end_position": feature.end_position, + } + if not is_train: + write_datum["id"] = feature.qas_id + write_datum["unique_id"] = "{}_{}".format(feature.qas_id, i) + write_datum["tokens"] = feature.tokens + write_datum["token_to_orig_map"] = feature.token_to_orig_map + write_datum["paragraph_len"] = feature.paragraph_len + write_datum["token_is_max_context"] = feature.token_is_max_context + if i == 0: # only store example data for the first feature from the example + write_datum["is_impossible"] = example.is_impossible + write_datum["answers"] = example.answers + write_datum["doc_tokens"] = example.doc_tokens + else: + write_datum["is_impossible"] = None + write_datum["answers"] = None + write_datum["doc_tokens"] = None + write_data.append(write_datum) + + return write_data + + +def process_korquad_1(config, data_file, train): + with open(data_file) as handle: + load_data = json.load(handle)["data"] + data = [] + for datum in load_data: + data.extend(datum["paragraphs"]) + + return data + + +def process_korquad_2(config, data_file, train): + TAGS = [ + "", + "", + "", + "", + "", + "", + "", + "", + "", + "
", + "
", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "

", + "

", + "", + "", + "", + "", + "
", + "
", + "

", + "

", + "

", + "

", + "

", + "

", + "
", + "
", + "", + "", + "", + "", + "", + "", + "", + "", + "[ํŽธ์ง‘]", + ] + + def remove_tags(text): + # only retain meaningful html tags to extract answers + # tags like
+ + def remove_tags(text): + # only retain meaningful html tags to extract answers + # tags like <li> will remain + text = re.sub(".*?", " ", text) + text = text.replace('rowspan="', "r") # summarize attribute name for col, row spans + text = text.replace('colspan="', "c") # which indicate merged table cells + for tag in TAGS: + text = text.replace(tag, "") + text = re.sub(" +", " ", text) + text = re.sub("\n+", "\n", text) + return text + + with open(data_file) as handle: + data = json.load(handle)["data"] + + qas_num = [] + for datum in data: + context = datum["context"] + datum["context"] = remove_tags(context) + del datum["raw_html"] + if train and not config.all_korquad_2_sample: # only use first sample from context for train split + datum["qas"] = datum["qas"][:1] + for qas in datum["qas"]: + del qas["answer"]["html_answer_text"] + del qas["answer"]["html_answer_start"] + # qas["id"] = "{}-{}".format(i, qas["id"]) + answer_prev_text = context[: qas["answer"]["answer_start"]] + answer_prev_len = len(answer_prev_text) + remove_tag_text = remove_tags(answer_prev_text) + # adjust span position according to text without tags + qas["answer"]["answer_start"] -= answer_prev_len - len(remove_tag_text) + qas["answer"]["text"] = remove_tags(qas["answer"]["text"]) + qas["answers"] = [qas.pop("answer")] + qas_num.append(len(datum["qas"])) + # some paragraphs in korquad_2 have too many question samples, + # which slows down the multiprocessing job, so we split them up + limit_qas_num = int(np.percentile(np.array(qas_num), 50)) + flat_data = [] + for datum in data: + num_qas = len(datum["qas"]) + if num_qas > limit_qas_num: + for j in range(0, num_qas, limit_qas_num): + _datum = copy.deepcopy(datum) + _datum["qas"] = datum["qas"][j : j + limit_qas_num] + flat_data.append(_datum) + else: + flat_data.append(datum) + data = flat_data + + return data + + +def process_kluemrc(config, data_file, train): + data = [] + with open(data_file) as handle: + for datum in json.load(handle)["data"]: + datum = datum["paragraphs"] + for qas in datum: + for q in qas["qas"]: + q["id"] = q.pop("guid") + data.extend(datum) + return data + + +def process_tydiqa(config, data_file, train): + data = [] + total = sum([1 for _ in open(data_file)]) + for line in tqdm(open(data_file), total=total, dynamic_ncols=True): + datum = json.loads(line) + + if datum["language"].lower().strip() != "korean": + continue + + span_byte_map = {} + prev_bytes = 0 + for i, char in enumerate(datum["document_plaintext"]): + byte_len = len(char.encode("utf-8")) + for j in range(byte_len): + span_byte_map[prev_bytes + j] = i + prev_bytes += byte_len + + answers = [] + bool_answers = [] + is_impossible = False + for annot in datum["annotations"]: + spans = annot["minimal_answer"] + start = spans["plaintext_start_byte"] + end = spans["plaintext_end_byte"] + + yesno = None if annot["yes_no_answer"] == "NONE" else annot["yes_no_answer"] + if yesno is not None: + bool_answers.append(yesno) + continue + + if spans["plaintext_start_byte"] == -1: + is_impossible = True + else: + start = span_byte_map[start] + end = span_byte_map[end] + answers.append( + { + "text": datum["document_plaintext"][start:end], + "answer_start": start, + } + ) + + # skip boolqa samples + if len(bool_answers) != 0: + continue + + if len(answers) != 0: + is_impossible = False + else: + is_impossible = True + + data.append( + { + "context": datum["document_plaintext"], + "qas": [ + { + "id": len(data), + "question": datum["question_text"], + "is_impossible": is_impossible, + "answers": answers, + } + ], + } + ) + + return data + + +process_map = { + "korquad_1":
process_korquad_1, + "korquad_2": process_korquad_2, + "tydiqa": process_tydiqa, + "kluemrc": process_kluemrc, +} + + +def collate_fn(features): + input_ids = [sample["input_ids"] for sample in features] + attention_mask = [sample["attention_mask"] for sample in features] + token_type_ids = [sample["token_type_ids"] for sample in features] + start_position = [sample["start_position"] for sample in features] + end_position = [sample["end_position"] for sample in features] + + input_ids = torch.tensor(np.array(input_ids).astype(np.int64), dtype=torch.long) + attention_mask = torch.tensor(np.array(attention_mask).astype(np.int8), dtype=torch.long) + token_type_ids = torch.tensor(np.array(token_type_ids).astype(np.int8), dtype=torch.long) + start_position = torch.tensor(np.array(start_position).astype(np.int64), dtype=torch.long) + end_position = torch.tensor(np.array(end_position).astype(np.int64), dtype=torch.long) + inputs = { + "input_ids": input_ids, + "attention_mask": attention_mask, + "token_type_ids": token_type_ids, + "start_positions": start_position, + "end_positions": end_position, + } + if "unique_id" in features[0]: + inputs["unique_id"] = [sample["unique_id"] for sample in features] + return inputs diff --git a/finetune/download_qa_dataset.sh b/finetune/download_qa_dataset.sh new file mode 100755 index 0000000..7f95a7d --- /dev/null +++ b/finetune/download_qa_dataset.sh @@ -0,0 +1,48 @@ +#!/bin/bash + +mkdir cache +cd cache + +# Download qa datasets +## 1. tydiqa +mkdir -p tydiqa +wget -P tydiqa https://storage.googleapis.com/tydiqa/v1.0/tydiqa-v1.0-train.jsonl.gz +wget -P tydiqa https://storage.googleapis.com/tydiqa/v1.0/tydiqa-v1.0-dev.jsonl.gz +gzip -d tydiqa/tydiqa-v1.0-train.jsonl.gz +gzip -d tydiqa/tydiqa-v1.0-dev.jsonl.gz + +## 2. korquad 2.1 +mkdir -p korquad_2/train +for var in {0..12} +do + var=$(printf %02d $var) + wget -P korquad_2/train https://github.com/korquad/korquad.github.io/raw/master/dataset/KorQuAD_2.1/train/KorQuAD_2.1_train_${var}.zip +done + +mkdir -p korquad_2/dev +for var in {0..1} +do + var=$(printf %02d $var) + wget -P korquad_2/dev https://github.com/korquad/korquad.github.io/raw/master/dataset/KorQuAD_2.1/dev/KorQuAD_2.1_dev_${var}.zip +done + +cd korquad_2 +cd train +unzip '*.zip' +rm *.zip +cd .. + +cd dev +unzip '*.zip' +rm *.zip +cd .. + +## 3. korquad 1.0 +# mkdir korquad_1 +# wget -P korquad_1 https://github.com/korquad/korquad.github.io/raw/master/dataset/KorQuAD_v1.0_train.json +# wget -P korquad_1 https://github.com/korquad/korquad.github.io/raw/master/dataset/KorQuAD_v1.0_dev.json + +## 4. 
klue mrc + # mkdir -p kluemrc + # wget -P kluemrc https://raw.githubusercontent.com/KLUE-benchmark/KLUE/v1.1.0/klue_benchmark/klue-mrc-v1.1/klue-mrc-v1.1_train.json + # wget -P kluemrc https://raw.githubusercontent.com/KLUE-benchmark/KLUE/v1.1.0/klue_benchmark/klue-mrc-v1.1/klue-mrc-v1.1_dev.json diff --git a/finetune/evaluate/__init__.py b/finetune/evaluate/__init__.py new file mode 100644 index 0000000..84d6a28 --- /dev/null +++ b/finetune/evaluate/__init__.py @@ -0,0 +1,4 @@ +from evaluate.cls import eval_cls +from evaluate.qa import eval_qa + +EVAL_FUNC_MAP = {"cls": eval_cls, "qa": eval_qa} diff --git a/finetune/evaluate/cls.py b/finetune/evaluate/cls.py new file mode 100644 index 0000000..e10e272 --- /dev/null +++ b/finetune/evaluate/cls.py @@ -0,0 +1,33 @@ +from functools import partial + +import numpy as np +import sklearn.metrics as sklearn_metrics + +binary_metrics = { + "accuracy": sklearn_metrics.accuracy_score, + "precision": sklearn_metrics.precision_score, + "recall": sklearn_metrics.recall_score, + "f1": sklearn_metrics.f1_score, + "matthews_corrcoef": sklearn_metrics.matthews_corrcoef, + "roc_auc": sklearn_metrics.roc_auc_score, +} + + +metrics = { + "accuracy": sklearn_metrics.accuracy_score, + "f1-macro": partial(sklearn_metrics.f1_score, average="macro"), +} + + +def eval_cls(results, **kwargs): + predictions = np.array([result["prediction"] for result in results]) + labels = np.array([result["label"] for result in results]) + is_binary = len(set(labels.tolist())) < 3 + results = { + metric: round(f(labels, predictions) * 100, 2) + for metric, f in (binary_metrics.items() if is_binary else metrics.items()) + } + return { + "results": results, + "best_score": results["f1" if is_binary else "f1-macro"], + } diff --git a/finetune/evaluate/qa.py b/finetune/evaluate/qa.py new file mode 100644 index 0000000..b1a103b --- /dev/null +++ b/finetune/evaluate/qa.py @@ -0,0 +1,593 @@ +import collections +import json +import os +import re +import shutil +import string +from collections import Counter + +from bs4 import BeautifulSoup +from tqdm import tqdm +from transformers.data.metrics.squad_metrics import _compute_softmax, _get_best_indexes, get_final_text +from transformers.data.processors.squad import SquadFeatures, SquadResult + +"""Official evaluation script for KorQuAD 2.0""" +"""This script is based on the SQuAD v1.1 evaluation script at https://rajpurkar.github.io/SQuAD-explorer/""" + + +def normalize_answer(s): + def tag_clean(t): + return BeautifulSoup(t, features="lxml").get_text() + + def remove_(text): + """Remove unnecessary symbols""" + text = re.sub("'", " ", text) + text = re.sub('"', " ", text) + text = re.sub("ใ€Š", " ", text) + text = re.sub("ใ€‹", " ", text) + text = re.sub("<", " ", text) + text = re.sub(">", " ", text) + text = re.sub("ใ€ˆ", " ", text) + text = re.sub("ใ€‰", " ", text) + text = re.sub(r"\(", " ", text) + text = re.sub(r"\)", " ", text) + text = re.sub("โ€˜", " ", text) + text = re.sub("โ€™", " ", text) + return text + + def white_space_fix(text): + return " ".join(text.split()).replace("\n", "").replace("\t", "").replace(" ", "") + + def remove_punc(text): + exclude = set(string.punctuation) + return "".join(ch for ch in text if ch not in exclude) + + def lower(text): + return text.lower() + + return white_space_fix(remove_punc(lower(remove_(tag_clean(s))))) + + +def f1_score(prediction, ground_truth): + prediction_tokens = normalize_answer(prediction).split() + ground_truth_tokens =
normalize_answer(ground_truth).split() + + # F1 by character + prediction_Char = [] + for tok in prediction_tokens: + now = [a for a in tok] + prediction_Char.extend(now) + + ground_truth_Char = [] + for tok in ground_truth_tokens: + now = [a for a in tok] + ground_truth_Char.extend(now) + + common = Counter(prediction_Char) & Counter(ground_truth_Char) + num_same = sum(common.values()) + if num_same == 0: + return 0 + + precision = 1.0 * num_same / len(prediction_Char) + recall = 1.0 * num_same / len(ground_truth_Char) + f1 = (2 * precision * recall) / (precision + recall) + + return f1 + + +def exact_match_score(prediction, ground_truth): + return normalize_answer(prediction) == normalize_answer(ground_truth) + + +def get_raw_scores(examples, preds): + """ + Computes the exact and f1 scores from the examples and the model predictions + """ + exact_scores = {} + f1_scores = {} + + for example in examples: + qas_id = example.qas_id + gold_answers = [answer["text"] for answer in example.answers if normalize_answer(answer["text"])] + + if not gold_answers: + # For unanswerable questions, only correct answer is empty string + gold_answers = [""] + + if qas_id not in preds: + print("Missing prediction for %s" % qas_id) + continue + + prediction = preds[qas_id] + + exact_scores[qas_id] = max(exact_match_score(a, prediction) for a in gold_answers) + f1_scores[qas_id] = max(f1_score(a, prediction) for a in gold_answers) + + return exact_scores, f1_scores + + +def apply_no_ans_threshold(scores, na_probs, qid_to_has_ans, na_prob_thresh): + new_scores = {} + for qid, s in scores.items(): + pred_na = na_probs[qid] > na_prob_thresh + if pred_na: + new_scores[qid] = float(not qid_to_has_ans[qid]) + else: + new_scores[qid] = s + return new_scores + + +def make_eval_dict(exact_scores, f1_scores, qid_list=None): + if not qid_list: + total = len(exact_scores) + return collections.OrderedDict( + [ + ("exact", 100.0 * sum(exact_scores.values()) / total), + ("f1", 100.0 * sum(f1_scores.values()) / total), + ("total", total), + ] + ) + else: + total = len(qid_list) + return collections.OrderedDict( + [ + ("exact", 100.0 * sum(exact_scores[k] for k in qid_list) / total), + ("f1", 100.0 * sum(f1_scores[k] for k in qid_list) / total), + ("total", total), + ] + ) + + +def merge_eval(main_eval, new_eval, prefix): + for k in new_eval: + main_eval["%s_%s" % (prefix, k)] = new_eval[k] + + +def find_best_thresh_v2(preds, scores, na_probs, qid_to_has_ans): + num_no_ans = sum(1 for k in qid_to_has_ans if not qid_to_has_ans[k]) + cur_score = num_no_ans + best_score = cur_score + best_thresh = 0.0 + qid_list = sorted(na_probs, key=lambda k: na_probs[k]) + for i, qid in enumerate(qid_list): + if qid not in scores: + continue + if qid_to_has_ans[qid]: + diff = scores[qid] + else: + if preds[qid]: + diff = -1 + else: + diff = 0 + cur_score += diff + if cur_score > best_score: + best_score = cur_score + best_thresh = na_probs[qid] + + has_ans_score, has_ans_cnt = 0, 0 + for qid in qid_list: + if not qid_to_has_ans[qid]: + continue + has_ans_cnt += 1 + + if qid not in scores: + continue + has_ans_score += scores[qid] + + return 100.0 * best_score / len(scores), best_thresh, 1.0 * has_ans_score / has_ans_cnt + + +def find_all_best_thresh_v2(main_eval, preds, exact_raw, f1_raw, na_probs, qid_to_has_ans): + best_exact, exact_thresh, has_ans_exact = find_best_thresh_v2(preds, exact_raw, na_probs, qid_to_has_ans) + best_f1, f1_thresh, has_ans_f1 = find_best_thresh_v2(preds, f1_raw, na_probs, qid_to_has_ans) + main_eval["best_exact"] 
= best_exact + main_eval["best_exact_thresh"] = exact_thresh + main_eval["best_f1"] = best_f1 + main_eval["best_f1_thresh"] = f1_thresh + main_eval["has_ans_exact"] = has_ans_exact + main_eval["has_ans_f1"] = has_ans_f1 + + +def find_best_thresh(preds, scores, na_probs, qid_to_has_ans): + num_no_ans = sum(1 for k in qid_to_has_ans if not qid_to_has_ans[k]) + cur_score = num_no_ans + best_score = cur_score + best_thresh = 0.0 + qid_list = sorted(na_probs, key=lambda k: na_probs[k]) + for _, qid in enumerate(qid_list): + if qid not in scores: + continue + if qid_to_has_ans[qid]: + diff = scores[qid] + else: + if preds[qid]: + diff = -1 + else: + diff = 0 + cur_score += diff + if cur_score > best_score: + best_score = cur_score + best_thresh = na_probs[qid] + return 100.0 * best_score / len(scores), best_thresh + + +def find_all_best_thresh(main_eval, preds, exact_raw, f1_raw, na_probs, qid_to_has_ans): + best_exact, exact_thresh = find_best_thresh(preds, exact_raw, na_probs, qid_to_has_ans) + best_f1, f1_thresh = find_best_thresh(preds, f1_raw, na_probs, qid_to_has_ans) + + main_eval["best_exact"] = best_exact + main_eval["best_exact_thresh"] = exact_thresh + main_eval["best_f1"] = best_f1 + main_eval["best_f1_thresh"] = f1_thresh + + +def compute_predictions_logits( + all_examples, + all_features, + all_results, + n_best_size, + max_answer_length, + do_lower_case, + output_prediction_file, + output_nbest_file, + output_null_log_odds_file, + verbose_logging, + version_2_with_negative, + null_score_diff_threshold, + tokenizer, +): + """Write final predictions to the json file and log-odds of null if needed.""" + + example_index_to_features = collections.defaultdict(list) + for feature in all_features: + example_index_to_features[feature.example_index].append(feature) + + unique_id_to_result = {} + for result in all_results: + unique_id_to_result[result.unique_id] = result + + _PrelimPrediction = collections.namedtuple( # pylint: disable=invalid-name + "PrelimPrediction", ["feature_index", "start_index", "end_index", "start_logit", "end_logit"] + ) + + all_predictions = collections.OrderedDict() + # all_nbest_json = collections.OrderedDict() + scores_diff_json = collections.OrderedDict() + + for (example_index, example) in enumerate(all_examples): + features = example_index_to_features[example_index] + + prelim_predictions = [] + # keep track of the minimum score of null start+end of position 0 + score_null = 1000000 # large and positive + min_null_feature_index = 0 # the paragraph slice with min null score + null_start_logit = 0 # the start logit at the slice with min null score + null_end_logit = 0 # the end logit at the slice with min null score + for (feature_index, feature) in enumerate(features): + result = unique_id_to_result[feature.unique_id] + start_indexes = _get_best_indexes(result.start_logits, n_best_size) + end_indexes = _get_best_indexes(result.end_logits, n_best_size) + # if we could have irrelevant answers, get the min score of irrelevant + if version_2_with_negative: + feature_null_score = result.start_logits[0] + result.end_logits[0] + if feature_null_score < score_null: + score_null = feature_null_score + min_null_feature_index = feature_index + null_start_logit = result.start_logits[0] + null_end_logit = result.end_logits[0] + for start_index in start_indexes: + for end_index in end_indexes: + # We could hypothetically create invalid predictions, e.g., predict + # that the start of the span is in the question. We throw out all + # invalid predictions. 
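# Illustrative sketch (standalone, with made-up indexes; not part of the
# patch) of the pruning performed by the checks just below: every pair of
# top start/end logit positions is enumerated, and pairs that end before
# they start or exceed max_answer_length are discarded.
import itertools

start_indexes, end_indexes = [5, 12], [7, 3]
max_answer_length = 10
valid = [
    (s, e)
    for s, e in itertools.product(start_indexes, end_indexes)
    if e >= s and e - s + 1 <= max_answer_length
]
assert valid == [(5, 7)]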
+ if start_index >= len(feature.tokens): + continue + if end_index >= len(feature.tokens): + continue + if start_index not in feature.token_to_orig_map: + continue + if end_index not in feature.token_to_orig_map: + continue + if not feature.token_is_max_context.get(start_index, False): + continue + if end_index < start_index: + continue + length = end_index - start_index + 1 + if length > max_answer_length: + continue + prelim_predictions.append( + _PrelimPrediction( + feature_index=feature_index, + start_index=start_index, + end_index=end_index, + start_logit=result.start_logits[start_index], + end_logit=result.end_logits[end_index], + ) + ) + if version_2_with_negative: + prelim_predictions.append( + _PrelimPrediction( + feature_index=min_null_feature_index, + start_index=0, + end_index=0, + start_logit=null_start_logit, + end_logit=null_end_logit, + ) + ) + prelim_predictions = sorted(prelim_predictions, key=lambda x: (x.start_logit + x.end_logit), reverse=True) + + _NbestPrediction = collections.namedtuple( # pylint: disable=invalid-name + "NbestPrediction", ["text", "start_logit", "end_logit", "tok_text", "orig_text"] + ) + + seen_predictions = {} + nbest = [] + for pred in prelim_predictions: + if len(nbest) >= n_best_size: + break + feature = features[pred.feature_index] + if pred.start_index > 0: # this is a non-null prediction + tok_tokens = feature.tokens[pred.start_index : (pred.end_index + 1)] + orig_doc_start = feature.token_to_orig_map[pred.start_index] + orig_doc_end = feature.token_to_orig_map[pred.end_index] + orig_tokens = example.doc_tokens[orig_doc_start : (orig_doc_end + 1)] + + tok_text = tokenizer.convert_tokens_to_string(tok_tokens) + + # tok_text = " ".join(tok_tokens) + # + # # De-tokenize WordPieces that have been split off. + # tok_text = tok_text.replace(" ##", "") + # tok_text = tok_text.replace("##", "") + + # Clean whitespace + tok_text = tok_text.strip() + tok_text = " ".join(tok_text.split()) + orig_text = " ".join(orig_tokens) + + final_text = orig_text + # final_text = get_final_text(tok_text, orig_text, do_lower_case, verbose_logging) + if final_text in seen_predictions: + continue + + seen_predictions[final_text] = True + else: + tok_text = "" + orig_text = "" + final_text = "" + seen_predictions[final_text] = True + + nbest.append( + _NbestPrediction( + text=final_text, + start_logit=pred.start_logit, + end_logit=pred.end_logit, + tok_text=tok_text, + orig_text=orig_text, + ) + ) + # if we didn't include the empty option in the n-best, include it + if version_2_with_negative: + if "" not in seen_predictions: + nbest.append( + _NbestPrediction( + text="", start_logit=null_start_logit, end_logit=null_end_logit, tok_text="", orig_text="" + ) + ) + + # In very rare edge cases we could only have single null prediction. + # So we just create a nonce prediction in this case to avoid failure. + if len(nbest) == 1: + nbest.insert( + 0, _NbestPrediction(text="empty", start_logit=0.0, end_logit=0.0, tok_text="", orig_text="") + ) + + # In very rare edge cases we could have no valid predictions. So we + # just create a nonce prediction in this case to avoid failure. 
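# Illustrative sketch (standalone, with made-up logits; not part of the patch)
# of the no-answer decision applied further below when version_2_with_negative
# is set: the model predicts the empty string iff the null span's score exceeds
# the best non-null span's score by more than null_score_diff_threshold.
score_null = 1.2  # start_logits[0] + end_logits[0]
best_non_null = 0.5  # start_logit + end_logit of the best real span
null_score_diff_threshold = 0.0
prediction = "" if score_null - best_non_null > null_score_diff_threshold else "<best span>"
assert prediction == ""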
+ if not nbest: + nbest.append(_NbestPrediction(text="empty", start_logit=0.0, end_logit=0.0, tok_text="", orig_text="")) + + assert len(nbest) >= 1, "No valid predictions" + + total_scores = [] + best_non_null_entry = None + for entry in nbest: + total_scores.append(entry.start_logit + entry.end_logit) + if not best_non_null_entry: + if entry.text: + best_non_null_entry = entry + + probs = _compute_softmax(total_scores) + + nbest_json = [] + for (i, entry) in enumerate(nbest): + output = collections.OrderedDict() + output["text"] = entry.text + output["probability"] = probs[i] + output["start_logit"] = entry.start_logit + output["end_logit"] = entry.end_logit + output["tok_text"] = entry.tok_text + output["orig_text"] = entry.orig_text + nbest_json.append(output) + + assert len(nbest_json) >= 1, "No valid predictions" + + if not version_2_with_negative: + # all_predictions[example.qas_id] = nbest_json[0]["text"] + all_predictions[example.qas_id] = get_final_text( + nbest_json[0]["tok_text"], nbest_json[0]["orig_text"], do_lower_case, verbose_logging + ) + else: + # predict "" iff the null score - the score of best non-null > threshold + score_diff = score_null - best_non_null_entry.start_logit - (best_non_null_entry.end_logit) + scores_diff_json[example.qas_id] = score_diff + if score_diff > null_score_diff_threshold: + all_predictions[example.qas_id] = "" + else: + # all_predictions[example.qas_id] = best_non_null_entry.text + all_predictions[example.qas_id] = get_final_text( + best_non_null_entry.tok_text, best_non_null_entry.orig_text, do_lower_case, verbose_logging + ) + + return all_predictions + + +def squad_evaluate(examples, preds, no_answer_probs=None, no_answer_probability_threshold=1.0): + qas_id_to_has_answer = {example.qas_id: bool(example.answers) for example in examples} + has_answer_qids = [qas_id for qas_id, has_answer in qas_id_to_has_answer.items() if has_answer] + no_answer_qids = [qas_id for qas_id, has_answer in qas_id_to_has_answer.items() if not has_answer] + + if no_answer_probs is None: + no_answer_probs = {k: 0.0 for k in preds} + + exact, f1 = get_raw_scores(examples, preds) + + exact_threshold = apply_no_ans_threshold( + exact, no_answer_probs, qas_id_to_has_answer, no_answer_probability_threshold + ) + f1_threshold = apply_no_ans_threshold(f1, no_answer_probs, qas_id_to_has_answer, no_answer_probability_threshold) + + evaluation = make_eval_dict(exact_threshold, f1_threshold) + + if has_answer_qids: + has_ans_eval = make_eval_dict(exact_threshold, f1_threshold, qid_list=has_answer_qids) + merge_eval(evaluation, has_ans_eval, "HasAns") + + if no_answer_qids: + no_ans_eval = make_eval_dict(exact_threshold, f1_threshold, qid_list=no_answer_qids) + merge_eval(evaluation, no_ans_eval, "NoAns") + + if no_answer_probs: + find_all_best_thresh(evaluation, preds, exact, f1, no_answer_probs, qas_id_to_has_answer) + + return evaluation, (exact, f1) + + +class Example: + def __init__(self, qas_id, answers, is_impossible, doc_tokens): + self.qas_id = qas_id + self.answers = answers + self.is_impossible = is_impossible + self.doc_tokens = doc_tokens + + def __str__(self): + text = "" + text += str(self.qas_id) + "\n" + text += str(self.answers) + "\n" + text += str(self.is_impossible) + "\n" + return text + + +def eval_qa(config, loader, tokenizer, results, **kwargs): + pred_dir = os.path.join(config.output_dir, config.task, "predictions") + batch_size = 10 + total_feature_num = int(len(loader) * config.eval_batch_size) + + examples, features, results, predictions = {}, {}, 
{}, {} + for i, feature in tqdm( + enumerate(loader.dataset), total=total_feature_num, dynamic_ncols=True, desc="compute scores..." + ): + qas_id = feature["id"] + + if qas_id not in examples and feature.get("answers", None) is not None: + examples[qas_id] = Example( + qas_id=feature["id"], + answers=feature["answers"], + is_impossible=feature["is_impossible"], + doc_tokens=feature["doc_tokens"], + ) + + if qas_id not in features: + features[qas_id] = [] + features[qas_id].append( + SquadFeatures( + input_ids=None, + attention_mask=None, + token_type_ids=None, + cls_index=None, + p_mask=None, + example_index=None, + unique_id=feature["unique_id"], + paragraph_len=feature["paragraph_len"], + token_is_max_context={int(k): v for k, v in feature["token_is_max_context"].items() if v is not None}, + tokens=feature["tokens"], + token_to_orig_map={int(k): v for k, v in feature["token_to_orig_map"].items() if v is not None}, + start_position=feature["start_position"], + end_position=feature["end_position"], + is_impossible=False, + qas_id=feature["id"], + ) + ) + + result_path = os.path.join(pred_dir, str(qas_id)) + if qas_id not in results and os.path.exists(result_path): + load_results = [json.loads(result) for result in open(result_path)] + results[qas_id] = [ + SquadResult(result["unique_id"], result["start_logits"], result["end_logits"]) + for result in load_results + ] + + # check how many examples loaded completely(feature num == result num). + all_loaded_ids = [] + incomplete_cnt = 0 + for k, v in results.items(): + if len(v) == len(features[k]): + all_loaded_ids.append(k) + else: + incomplete_cnt += 1 + + if len(all_loaded_ids) >= batch_size or (i == total_feature_num - 1 and len(all_loaded_ids) > 0): + + batch_examples, batch_features, batch_results = [], [], [] + for qas_id in all_loaded_ids: + for feature in features.pop(qas_id): + feature.example_index = len(batch_examples) + batch_features.append(feature) + batch_examples.append(examples[qas_id]) + batch_results.extend(results.pop(qas_id)) + + predictions.update( + compute_predictions_logits( + batch_examples, + batch_features, + batch_results, + config.n_best_size, + config.max_answer_length, + config.do_lower_case, + None, # output_prediction_file, + None, # output_nbest_file, + None, # output_null_log_odds_file, + False, + config.version_2_with_negative, + config.null_score_diff_threshold, + tokenizer, + ) + ) + + del batch_examples, batch_features, batch_results + + eval_result, _ = squad_evaluate([v for k, v in examples.items()], predictions) + + eval_keys = ["HasAns_total", "NoAns_total", "best_exact", "best_f1"] + if config.version_2_with_negative: + eval_keys.extend( + [ + "HasAns_exact", + "HasAns_f1", + "NoAns_exact", + ] + ) + + results = {} + for k in eval_keys: + if k not in eval_result: + continue + results[k] = round(eval_result[k], 2) + + shutil.rmtree(pred_dir) + os.makedirs(pred_dir) + + return { + "results": results, + "best_score": results["best_f1"], + } diff --git a/finetune/model/__init__.py b/finetune/model/__init__.py new file mode 100644 index 0000000..c0298e2 --- /dev/null +++ b/finetune/model/__init__.py @@ -0,0 +1,4 @@ +from model.cls import ClsModel +from model.qa import QAModel + +MODEL_CLASS_MAP = {"cls": ClsModel, "qa": QAModel} diff --git a/finetune/model/base.py b/finetune/model/base.py new file mode 100644 index 0000000..7198023 --- /dev/null +++ b/finetune/model/base.py @@ -0,0 +1,87 @@ +import torch +from transformers import AdamW, get_linear_schedule_with_warmup + + +class 
BaseModel(torch.nn.Module): + def __init__(self, config): + super().__init__() + self.config = config + self.from_pretrained() + + def save_pretrained(self, save_dir): + self.model.save_pretrained(save_dir) + for key in ["special_tokens_map_file", "tokenizer_file"]: + self.tokenizer.init_kwargs.pop(key, None) + self.tokenizer.save_pretrained(save_dir) + + def from_pretrained(self): + raise NotImplementedError + + def forward(self, inputs): + return self.model(**inputs) + + def eval_step(self, outputs): + raise NotImplementedError + + @staticmethod + def add_args(parser): + parser.add_argument( + "--model_name_or_path", + default=None, + type=str, + required=True, + ) + parser.add_argument("--data_dir", default="cache", type=str) + parser.add_argument("--train_file", default=None, type=str) + parser.add_argument("--predict_file", default=None, type=str) + + parser.add_argument("--do_lower_case", action="store_true") + parser.add_argument("--max_seq_length", default=512, type=int) + parser.add_argument("--weight_decay", default=0.0, type=float, help="Weight decay if we apply some.") + parser.add_argument("--adam_epsilon", default=1e-8, type=float, help="Epsilon for Adam optimizer.") + parser.add_argument("--max_grad_norm", default=1.0, type=float) + parser.add_argument("--num_train_epochs", default=10, type=int) + parser.add_argument("--train_batch_size", default=8, type=int) + parser.add_argument("--eval_batch_size", default=16, type=int) + parser.add_argument("--learning_rate", default=3e-5, type=float) + parser.add_argument("--gradient_accumulation_steps", default=1, type=int) + parser.add_argument("--warmup_proportion", default=0.0, type=float) + + return parser + + def get_optimizer(self): + """Prepare optimizer""" + no_decay = ["bias", "LayerNorm.weight"] + optimizer_grouped_parameters = [ + { + "params": [p for n, p in self.model.named_parameters() if not any(nd in n for nd in no_decay)], + "weight_decay": self.config.weight_decay, + }, + { + "params": [p for n, p in self.model.named_parameters() if any(nd in n for nd in no_decay)], + "weight_decay": 0.0, # no weight decay for bias / LayerNorm parameters + }, + ] + optimizer = AdamW(optimizer_grouped_parameters, lr=self.config.learning_rate, eps=self.config.adam_epsilon) + return optimizer + + def get_scheduler(self, batch_num, optimizer): + """Prepare scheduler""" + if self.config.warmup_proportion == 0.0: + return None + + t_total = batch_num // self.config.gradient_accumulation_steps * self.config.num_train_epochs + + scheduler = get_linear_schedule_with_warmup( + optimizer, + num_warmup_steps=int(t_total * self.config.warmup_proportion), + num_training_steps=t_total, + ) + + return scheduler + + def tensor_to_array(self, tensor): + return tensor.detach().cpu().numpy() + + def tensor_to_list(self, tensor): + return self.tensor_to_array(tensor).tolist() diff --git a/finetune/model/cls.py b/finetune/model/cls.py new file mode 100644 index 0000000..b4b2c07 --- /dev/null +++ b/finetune/model/cls.py @@ -0,0 +1,47 @@ +import os + +import torch +from data.cls import process_map +from model.base import BaseModel +from transformers import AutoConfig, AutoModelForSequenceClassification, AutoTokenizer + + +class ClsModel(BaseModel): + def __init__(self, config, **kwargs): + super().__init__(config, **kwargs) + config.label2id = self.config.label2id + + def from_pretrained(self): + data_file = os.path.join(self.config.data_dir, str(self.config.train_file)) + self.config.label2id = process_map[self.config.dataset](self.config, data_file, True, get_label_map=True)
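# Illustrative sketch (standalone, with hypothetical labels; not part of the
# patch) of the label maps wired into the model config below: label2id is
# derived from the dataset, and id2label is simply its inverse.
label2id = {"negative": 0, "positive": 1}
id2label = {int(v): k for k, v in label2id.items()}
assert id2label[1] == "positive"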
+ num_labels = len(self.config.label2id) + if num_labels != self.config.num_labels: + print( + f"given args num_labels({self.config.num_labels}) does not match num_labels({num_labels}) from the dataset." + ) + print(f"switching num_labels {self.config.num_labels} -> {num_labels}") + self.config.num_labels = num_labels + model_config = AutoConfig.from_pretrained(self.config.model_name_or_path, num_labels=self.config.num_labels) + model_config.label2id = self.config.label2id + model_config.id2label = {int(v): k for k, v in model_config.label2id.items()} + self.model = AutoModelForSequenceClassification.from_pretrained( + self.config.model_name_or_path, config=model_config, cache_dir=self.config.cache_dir + ) + self.tokenizer = AutoTokenizer.from_pretrained(self.config.model_name_or_path, cache_dir=self.config.cache_dir) + + def forward(self, inputs): + outputs = self.model(**inputs) + return outputs + + def eval_step(self, inputs, outputs): + logits = outputs.logits.detach().cpu() + predictions = self.tensor_to_list(torch.argmax(logits, dim=-1)) + labels = self.tensor_to_list(inputs["labels"]) + results = [{"prediction": prediction, "label": label} for prediction, label in zip(predictions, labels)] + return results + + @staticmethod + def add_args(parser): + parser = BaseModel.add_args(parser) + parser.add_argument("--num_labels", default=2, type=int) + return parser diff --git a/finetune/model/qa.py b/finetune/model/qa.py new file mode 100644 index 0000000..5ea2c7e --- /dev/null +++ b/finetune/model/qa.py @@ -0,0 +1,77 @@ +import json +import os +import shutil + +from model.base import BaseModel +from transformers import AutoModelForQuestionAnswering, AutoTokenizer + + +class QAModel(BaseModel): + def __init__(self, config, **kwargs): + super().__init__(config, **kwargs) + self.pred_dir = os.path.join(self.config.output_dir, self.config.task, "predictions") + if os.path.exists(self.pred_dir): + shutil.rmtree(self.pred_dir) + os.makedirs(self.pred_dir) + + def from_pretrained(self): + self.model = AutoModelForQuestionAnswering.from_pretrained( + self.config.model_name_or_path, + cache_dir=self.config.cache_dir, + ) + self.tokenizer = AutoTokenizer.from_pretrained( + self.config.model_name_or_path, cache_dir=self.config.cache_dir, use_fast=False + ) + + def forward(self, inputs): + if self.model.config.model_type in ["roberta"]: + inputs.pop("token_type_ids", None) + unique_id = inputs.pop("unique_id", None) + outputs = self.model(**inputs) + inputs["unique_id"] = unique_id + return outputs + + def eval_step(self, inputs, outputs): + start_logits, end_logits = outputs.start_logits, outputs.end_logits + start_logits = self.tensor_to_list(start_logits) + end_logits = self.tensor_to_list(end_logits) + sample_num = len(inputs["unique_id"]) + for i in range(sample_num): + qas_id = "_".join(inputs["unique_id"][i].split("_")[:-1]) + with open(os.path.join(self.pred_dir, str(qas_id)), "a") as writer: + write_data = { + "unique_id": inputs["unique_id"][i], + "start_logits": start_logits[i], + "end_logits": end_logits[i], + } + writer.write(json.dumps(write_data) + "\n") + return [None] * sample_num + + @staticmethod + def add_args(parser): + parser = BaseModel.add_args(parser) + parser.add_argument("--version_2_with_negative", action="store_true") + parser.add_argument("--null_score_diff_threshold", default=0.0, type=float) + parser.add_argument("--doc_stride", default=384, type=int) + parser.add_argument("--max_query_length", default=64, type=int) + parser.add_argument( + "--n_best_size", +
default=20, + type=int, + help="The total number of n-best predictions to generate in the nbest_predictions.json output file.", + ) + parser.add_argument( + "--max_answer_length", + default=32, + type=int, + help="The maximum length of an answer that can be generated. This is needed because the start " + "and end predictions are not conditioned on one another.", + ) + parser.add_argument( + "--all_korquad_2_sample", + action="store_true", + help="Use all training samples from korquad2 or not. Do not use all samples by default " + "because of the limitation on computational resources", + ) + return parser diff --git a/finetune/requirements.txt b/finetune/requirements.txt new file mode 100644 index 0000000..3c93700 --- /dev/null +++ b/finetune/requirements.txt @@ -0,0 +1,7 @@ +tqdm==4.62.3 +torch==1.8.1 +transformers==4.11.3 +datasets==1.13.3 +scikit-learn==0.24.2 +beautifulsoup4==4.6.0 +lxml==4.6.3 diff --git a/finetune/run.py b/finetune/run.py new file mode 100644 index 0000000..581e997 --- /dev/null +++ b/finetune/run.py @@ -0,0 +1,259 @@ +import argparse +import copy +import json +import logging +import os + +import numpy as np +import torch +import transformers +from data import get_data +from evaluate import EVAL_FUNC_MAP +from model import MODEL_CLASS_MAP +from tqdm import tqdm + +logging.basicConfig( + format="%(asctime)s - %(levelname)s - %(name)s - %(message)s", + datefmt="%m/%d/%Y %H:%M:%S", + level=logging.INFO, +) + +logger = logging.getLogger(__name__) + + +try: + import torch_xla.core.xla_model as xm + import torch_xla.distributed.data_parallel as xla_dp +except ImportError as e: + logger.error(f"Failed to import XLA. {e}") + + +def set_seed(seed): + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = True + + +def cal_running_avg_loss(loss, running_avg_loss, decay=0.99): + if running_avg_loss == 0: + return loss + running_avg_loss = running_avg_loss * decay + (1 - decay) * loss + return running_avg_loss + + +def _run_epoch(model, loader, device=None, context=None, **kwargs): + config = kwargs["config"] + is_train = kwargs["is_train"] + + avg_loss = 0 + results = [] + batch_num = len(loader) + + if is_train: + model.train() + if config.use_tpu: + optimizer = context.getattr_or( + "optimizer", + lambda: model.get_optimizer(), + ) + scheduler = context.getattr_or( + "scheduler", + lambda: model.get_scheduler(batch_num, optimizer), + ) + else: + optimizer = kwargs["optimizer"] + scheduler = kwargs["scheduler"] + else: + model.eval() + + is_master = True + if config.use_tpu: + is_master = xm.is_master_ordinal() + + pbar = tqdm(enumerate(loader), total=batch_num, disable=not is_master, dynamic_ncols=True) + for i, inputs in pbar: + + if not config.use_tpu: + for k, v in inputs.items(): + if isinstance(v, torch.Tensor): + inputs[k] = v.to(device) + + outputs = model(inputs) + loss = outputs.loss.mean() + avg_loss = cal_running_avg_loss(loss.item(), avg_loss) + loss /= config.gradient_accumulation_steps + + if is_train: + loss.backward() + if i % config.gradient_accumulation_steps == 0 or i == batch_num - 1: + + if config.max_grad_norm > 0: + torch.nn.utils.clip_grad_norm_(model.parameters(), config.max_grad_norm) + + if config.use_tpu: + xm.optimizer_step(optimizer) + else: + optimizer.step() + optimizer.zero_grad() + + if scheduler is not None: + scheduler.step() + else: + result = (model.module if hasattr(model, "module") else model).eval_step(inputs, outputs) + 
results.extend(result) + + if is_master: + pbar.set_description( + f"epoch: {kwargs['epoch'] + 1}, {('train' if is_train else 'valid')} loss: {min(100, round(avg_loss, 4))}" + ) + + return { + "loss": avg_loss, + "result": results, + } + + +def run_epoch(**kwargs): + model = kwargs.pop("model") + if kwargs["config"].use_tpu: + results = model(_run_epoch, **kwargs) + else: + results = _run_epoch(model, **kwargs) + + if isinstance(results, list): + loss = sum([result["loss"] for result in results]) / len(results) + result = [] + for res in results: + result.extend(res["result"]) + results = {"loss": loss, "result": result} + + return results + + +def run(parser): + # NOTE Remove redundant bigbird logs + transformers.logging.get_logger("transformers.models.big_bird.modeling_big_bird").setLevel(logging.ERROR) + + args, _ = parser.parse_known_args() + + model = MODEL_CLASS_MAP.get(args.task, None) + if model is None: + raise Exception(f"Invalid model task {args.task}") + + parser = model.add_args(parser) + config = parser.parse_args() + + set_seed(config.seed) + + model = model(config) + + logger.info(f"configuration: {str(config)}") + + if config.use_tpu: + devices = xm.get_xla_supported_devices() + model_dp = xla_dp.DataParallel(model, device_ids=devices) + else: + if torch.cuda.is_available(): + gpu_count = torch.cuda.device_count() + logger.info(f"{gpu_count} GPU device detected") + devices = ["cuda:{}".format(i) for i in range(gpu_count)] + model_dp = torch.nn.DataParallel(model, device_ids=devices) + model.to(devices[0]) + else: + devices = ["cpu"] + model_dp = model + + config.world_size = len(devices) + if config.do_train: + train_loader = get_data(config, tokenizer=model.tokenizer) + valid_loader = get_data(config, tokenizer=model.tokenizer, is_train=False) + + optimizer = None + scheduler = None + if not config.use_tpu and config.do_train: + optimizer = model.get_optimizer() + scheduler = model.get_scheduler(len(train_loader), optimizer) + + params = { + "config": config, + "model": model_dp, + "optimizer": optimizer, + "scheduler": scheduler, + } + if not config.use_tpu: + params["device"] = devices[0] + + def do_eval(epoch): + with torch.no_grad(): + results = run_epoch(loader=valid_loader, epoch=epoch, is_train=False, **params)["result"] + results = EVAL_FUNC_MAP[config.task]( + config=config, + model=model, + loader=valid_loader, + tokenizer=model.tokenizer, + results=results, + ) + + logger.info("Eval results.") + for k, v in results["results"].items(): + logger.info(f"{k} : {v}") + + return results["best_score"] + + if config.do_train: + best_score = 0 + for epoch in range(config.num_train_epochs): + run_epoch(loader=train_loader, epoch=epoch, is_train=True, **params) + + score = 0 + if config.do_eval_during_train: + score = do_eval(epoch) + + if score >= best_score: + best_score = score + output_dir = os.path.join(config.output_dir, config.task, config.dataset, f"{epoch}-{best_score}-ckpt") + copy.deepcopy( + model_dp.module + if hasattr(model_dp, "module") + else model_dp._models[0] + if hasattr(model_dp, "_models") + else model_dp + ).cpu().save_pretrained(output_dir) + with open(os.path.join(output_dir, "finetune_config.json"), "w") as save_config: + json.dump(vars(config), save_config, sort_keys=True, indent=4) + logger.info(f"Checkpoint {output_dir} saved.") + + if config.do_eval: + do_eval(-1) + + +def main(parser): + args, _ = parser.parse_known_args() + + if not os.path.exists(args.cache_dir): + os.makedirs(args.cache_dir) + + output_dir = os.path.join(args.output_dir, 
args.task, args.dataset) + if not os.path.exists(output_dir): + os.makedirs(output_dir) + + run(parser) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + + parser.add_argument("--task", default=None, type=str, required=True) + parser.add_argument("--dataset", default=None, type=str, required=True) + parser.add_argument("--cache_dir", default="cache", type=str) + parser.add_argument("--output_dir", default="output", type=str) + parser.add_argument("--do_train", action="store_true") + parser.add_argument("--do_eval_during_train", action="store_true") + parser.add_argument("--do_eval", action="store_true") + parser.add_argument("--use_tpu", action="store_true") + parser.add_argument("--threads", default=4, type=int) + parser.add_argument("--seed", default=42, type=int) + + main(parser) diff --git a/finetune/scripts/run_fake_news.sh b/finetune/scripts/run_fake_news.sh new file mode 100644 index 0000000..99bae5a --- /dev/null +++ b/finetune/scripts/run_fake_news.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +python3 run.py \ +--task cls \ +--dataset fake_news \ +--do_train \ +--do_eval_during_train \ +--do_eval \ +--use_tpu \ +--model_name_or_path monologg/kobigbird-bert-base \ +--data_dir cache/fake_news_data \ +--train_file mission2_train.csv \ +--predict_file mission2_train.csv \ +--max_seq_length 1024 \ +--train_batch_size 4 \ +--eval_batch_size 2 \ +--learning_rate 3e-5 \ +--gradient_accumulation_steps 2 \ +--num_labels 2 \ +--num_train_epochs 10 diff --git a/finetune/scripts/run_fake_news_short.sh b/finetune/scripts/run_fake_news_short.sh new file mode 100644 index 0000000..1cd4b84 --- /dev/null +++ b/finetune/scripts/run_fake_news_short.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +python3 run.py \ +--task cls \ +--dataset fake_news \ +--do_train \ +--do_eval_during_train \ +--do_eval \ +--use_tpu \ +--model_name_or_path klue/roberta-base \ +--data_dir cache/fake_news_data \ +--train_file mission2_train.csv \ +--predict_file mission2_train.csv \ +--max_seq_length 512 \ +--train_batch_size 8 \ +--eval_batch_size 8 \ +--learning_rate 3e-5 \ +--gradient_accumulation_steps 1 \ +--num_labels 2 \ +--num_train_epochs 10 diff --git a/finetune/scripts/run_korquad_2.sh b/finetune/scripts/run_korquad_2.sh new file mode 100644 index 0000000..c4d6cd0 --- /dev/null +++ b/finetune/scripts/run_korquad_2.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +python3 run.py \ +--task qa \ +--dataset korquad_2 \ +--do_train \ +--do_eval_during_train \ +--do_eval \ +--use_tpu \ +--model_name_or_path monologg/kobigbird-bert-base \ +--data_dir cache/korquad_2 \ +--train_file train \ +--predict_file dev \ +--max_seq_length 4096 \ +--doc_stride 3072 \ +--max_answer_length 4096 \ +--train_batch_size 2 \ +--eval_batch_size 1 \ +--learning_rate 3e-5 \ +--gradient_accumulation_steps 4 \ +--num_train_epochs 5 diff --git a/finetune/scripts/run_korquad_2_short.sh b/finetune/scripts/run_korquad_2_short.sh new file mode 100644 index 0000000..fef40de --- /dev/null +++ b/finetune/scripts/run_korquad_2_short.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +python3 run.py \ +--task qa \ +--dataset korquad_2 \ +--do_train \ +--do_eval_during_train \ +--do_eval \ +--use_tpu \ +--model_name_or_path klue/roberta-base \ +--data_dir cache/korquad_2 \ +--train_file train \ +--predict_file dev \ +--max_seq_length 512 \ +--doc_stride 384 \ +--max_answer_length 512 \ +--train_batch_size 8 \ +--eval_batch_size 8 \ +--learning_rate 3e-5 \ +--gradient_accumulation_steps 1 \ +--num_train_epochs 5 diff --git a/finetune/scripts/run_modu_sentiment.sh 
b/finetune/scripts/run_modu_sentiment.sh new file mode 100644 index 0000000..3316ee3 --- /dev/null +++ b/finetune/scripts/run_modu_sentiment.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +python3 run.py \ +--task cls \ +--dataset modu_sentiment \ +--do_train \ +--do_eval_during_train \ +--do_eval \ +--use_tpu \ +--model_name_or_path monologg/kobigbird-bert-base \ +--data_dir cache/modu-corpus/sentiment-analysis \ +--train_file EXSA2002108040.json \ +--predict_file EXSA2002108040.json \ +--max_seq_length 1024 \ +--train_batch_size 4 \ +--eval_batch_size 2 \ +--learning_rate 3e-5 \ +--gradient_accumulation_steps 2 \ +--num_labels 2 \ +--num_train_epochs 20 diff --git a/finetune/scripts/run_modu_sentiment_short.sh b/finetune/scripts/run_modu_sentiment_short.sh new file mode 100644 index 0000000..44a00a3 --- /dev/null +++ b/finetune/scripts/run_modu_sentiment_short.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +python3 run.py \ +--task cls \ +--dataset modu_sentiment \ +--do_train \ +--do_eval_during_train \ +--do_eval \ +--use_tpu \ +--model_name_or_path klue/roberta-base \ +--data_dir cache/modu-corpus/sentiment-analysis \ +--train_file EXSA2002108040.json \ +--predict_file EXSA2002108040.json \ +--max_seq_length 512 \ +--train_batch_size 8 \ +--eval_batch_size 8 \ +--learning_rate 3e-5 \ +--gradient_accumulation_steps 1 \ +--num_labels 2 \ +--num_train_epochs 20 diff --git a/finetune/scripts/run_tydiqa.sh b/finetune/scripts/run_tydiqa.sh new file mode 100644 index 0000000..60adb7c --- /dev/null +++ b/finetune/scripts/run_tydiqa.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +python3 run.py \ +--task qa \ +--dataset tydiqa \ +--do_train \ +--do_eval_during_train \ +--do_eval \ +--use_tpu \ +--model_name_or_path monologg/kobigbird-bert-base \ +--data_dir cache/tydiqa \ +--train_file tydiqa-v1.0-train.jsonl \ +--predict_file tydiqa-v1.0-dev.jsonl \ +--max_seq_length 4096 \ +--doc_stride 3072 \ +--max_answer_length 32 \ +--version_2_with_negative \ +--train_batch_size 2 \ +--eval_batch_size 1 \ +--learning_rate 3e-5 \ +--gradient_accumulation_steps 4 \ +--num_train_epochs 5 diff --git a/finetune/scripts/run_tydiqa_short.sh b/finetune/scripts/run_tydiqa_short.sh new file mode 100644 index 0000000..59ad063 --- /dev/null +++ b/finetune/scripts/run_tydiqa_short.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +python3 run.py \ +--task qa \ +--dataset tydiqa \ +--do_train \ +--do_eval_during_train \ +--do_eval \ +--use_tpu \ +--model_name_or_path klue/roberta-base \ +--data_dir cache/tydiqa \ +--train_file tydiqa-v1.0-train.jsonl \ +--predict_file tydiqa-v1.0-dev.jsonl \ +--max_seq_length 512 \ +--doc_stride 384 \ +--max_answer_length 32 \ +--version_2_with_negative \ +--train_batch_size 8 \ +--eval_batch_size 8 \ +--learning_rate 3e-5 \ +--gradient_accumulation_steps 1 \ +--num_train_epochs 5 diff --git a/pretrain/.gitignore b/pretrain/.gitignore new file mode 100644 index 0000000..60490f0 --- /dev/null +++ b/pretrain/.gitignore @@ -0,0 +1,6 @@ +data/ +pretrain_tfrecords/ +*wiki*.txt + +init_checkpoint/ +kobigbird-bert-base*/ diff --git a/pretrain/README.md b/pretrain/README.md new file mode 100644 index 0000000..092369c --- /dev/null +++ b/pretrain/README.md @@ -0,0 +1,156 @@ +# Pretraining BigBird + +

+    <b>ํ•œ๊ตญ์–ด</b> |
+    <a href="README_EN.md">English</a>
+

+
+## ๊ธฐ์กด BigBird ์ฝ”๋“œ์™€์˜ ์ฐจ์ด์  ๋ฐ ๊ฐœ์„ ์ 
+
+> NOTE: ์›๋ณธ BigBird ์ฝ”๋“œ๋Š” [original bigbird github](https://github.com/google-research/bigbird) ์ฐธ๊ณ 
+
+- **RoBERTa๊ฐ€ ์•„๋‹Œ BERT๋ฅผ ์ด์šฉํ•˜์—ฌ warm start**
+
+  - ์ž์ฒด์ ์œผ๋กœ ์ƒˆ๋กœ ํ•™์Šตํ•œ BERT๋ฅผ ์ด์šฉ
+
+- **BERT checkpoint loading ๊ด€๋ จ ์ด์Šˆ ํ•ด๊ฒฐ**
+
+  - tf variable name์„ ์ˆ˜์ •ํ•˜์—ฌ LMHead๊ฐ€ ์ •์ƒ์ ์œผ๋กœ ๋กœ๋”ฉ๋˜์ง€ ์•Š๋Š” ์ด์Šˆ ํ•ด๊ฒฐ (e.g. `transform` -> `transform/dense`)
+  - ๊ธฐ์กด์˜ 512์ธ position embeddings์„ ๋กœ๋”ฉํ•˜์ง€ ์•Š๋„๋ก ์ฒ˜๋ฆฌ
+
+- **Hard Coding ์ด์Šˆ ํ•ด๊ฒฐ**
+
+  - `MAX_SEQ_LEN=4096`์œผ๋กœ ๊ฐ•์ œ๋˜์–ด ์žˆ๋Š” ๋ถ€๋ถ„
+  - Sentencepiece tokenizer ์‚ฌ์šฉ์ด ๊ฐ•์ œ๋˜์–ด ์žˆ๋Š” ๋ถ€๋ถ„
+  - RoBERTa Vocab์— ๋งž์ถฐ token id๊ฐ€ ํ•˜๋“œ์ฝ”๋”ฉ๋œ ๋ถ€๋ถ„
+
+- **`Tensorflow Datasets (tfds)`๋ฅผ ์‚ฌ์šฉํ•˜์ง€ ์•Š๋„๋ก ๋ณ€๊ฒฝ**
+
+  - ์—ฌ๋Ÿฌ ๋ฒ„์ „์œผ๋กœ ํ…Œ์ŠคํŠธํ•ด๋ณด์•˜์ง€๋งŒ ์ •์ƒ ์ž‘๋™ํ•˜์ง€ ์•Š๋Š” ์ด์Šˆ ์กด์žฌ
+  - ๊ทธ ๋Œ€์‹  TFRecord Builder ์ฝ”๋“œ ์ถ”๊ฐ€ ([`create_pretraining_data.py`](./create_pretraining_data.py))
+
+## How to Pretrain
+
+### 0. (Optional) Prepare your own BERT
+
+- Warm Start๋ฅผ ํ•˜์ง€ ์•Š๋Š”๋‹ค๋ฉด BERT๊ฐ€ ์—†์–ด๋„ ์ƒ๊ด€์—†์Šต๋‹ˆ๋‹ค.
+- BERT๋ฅผ ์ง์ ‘ ๋งŒ๋“ค๊ณ  ์‹ถ์œผ๋ฉด [BERT Github](https://github.com/google-research/bert)๋ฅผ ์ฐธ๊ณ 
+- ์•„๋ž˜์™€ ๊ฐ™์€ ํ˜•ํƒœ๋กœ **tensorflow v1 checkpoint**๋ฅผ ์ค€๋น„ํ•ด์•ผ ํ•ฉ๋‹ˆ๋‹ค.
+
+```text
+init_checkpoint
+โ”œโ”€โ”€ bert_config.json
+โ”œโ”€โ”€ checkpoint
+โ”œโ”€โ”€ graph.pbtxt
+โ”œโ”€โ”€ model.ckpt-0.data-00000-of-00001
+โ”œโ”€โ”€ model.ckpt-0.index
+โ””โ”€โ”€ model.ckpt-0.meta
+```
+
+### 1. Prepare Tokenizer
+
+- `Huggingface Transformers`์— ํ˜ธํ™˜๋˜๋Š” tokenizer๋ฅผ ์ค€๋น„ํ•ด์•ผ ํ•ฉ๋‹ˆ๋‹ค. (KoBigBird์˜ [tokenizer](./tokenizer)๋ฅผ sample๋กœ ์—…๋กœ๋“œ)
+- `BigBirdTokenizer`์™€์˜ ํ˜ธํ™˜์„ ์œ„ํ•ด `BertTokenizer`์— `bos token(=<s>)`, `eos token(=</s>)`์„ ์ถ”๊ฐ€
+
+### 2. Create TFRecord
+
+```bash
+bash scripts/build_tfrecord.sh
+```
+
+#### Prepare Corpus
+
+[`ko_lm_dataformat`](https://github.com/monologg/ko_lm_dataformat) ํ˜น์€ `txt` ํŒŒ์ผ ์‚ฌ์šฉ ๊ฐ€๋Šฅ
+
+- `ko_lm_dataformat`์€ sentence split๋œ document ๋‹จ์œ„๋กœ ๋“ค์–ด์˜จ๋‹ค๊ณ  ๊ฐ€์ •
+
+```python
+for data in rdr.stream_data():
+    print(data)
+
+# data
+['์ œ์ž„์Šค ์–ผ ์นดํ„ฐ ์ฃผ๋‹ˆ์–ด(1924๋…„ 10์›” 1์ผ ~)๋Š” ๋ฏผ์ฃผ๋‹น ์ถœ์‹  ๋ฏธ๊ตญ 39๋Œ€ ๋Œ€ํ†ต๋ น (1977๋…„ ~ 1981๋…„)์ด๋‹ค.',
+ '์ง€๋ฏธ ์นดํ„ฐ๋Š” ์กฐ์ง€์•„์ฃผ ์„ฌํ„ฐ ์นด์šดํ‹ฐ ํ”Œ๋ ˆ์ธ์Šค ๋งˆ์„์—์„œ ํƒœ์–ด๋‚ฌ๋‹ค.',
+ '๊ทธ ํ›„ ํ•ด๊ตฐ์— ๋“ค์–ด๊ฐ€ ์ „ํ•จยท์›์ž๋ ฅยท์ž ์ˆ˜ํ•จ์˜ ์Šน๋ฌด์›์œผ๋กœ ์ผํ•˜์˜€๋‹ค.',
+ ...]
+```
+
+- `txt`์˜ ๊ฒฝ์šฐ document ์‚ฌ์ด์— ๋นˆ ์ค„(newline)์ด ์žˆ๋‹ค๊ณ  ๊ฐ€์ •
+
+```text
+์ œ์ž„์Šค ์–ผ ์นดํ„ฐ ์ฃผ๋‹ˆ์–ด(1924๋…„ 10์›” 1์ผ ~)๋Š” ๋ฏผ์ฃผ๋‹น ์ถœ์‹  ๋ฏธ๊ตญ 39๋Œ€ ๋Œ€ํ†ต๋ น (1977๋…„ ~ 1981๋…„)์ด๋‹ค.
+์ง€๋ฏธ ์นดํ„ฐ๋Š” ์กฐ์ง€์•„์ฃผ ์„ฌํ„ฐ ์นด์šดํ‹ฐ ํ”Œ๋ ˆ์ธ์Šค ๋งˆ์„์—์„œ ํƒœ์–ด๋‚ฌ๋‹ค.
+
+์ˆ˜ํ•™(ๆ•ธๅญธ)์€ ์ˆ˜, ์–‘, ๊ตฌ์กฐ, ๊ณต๊ฐ„, ๋ณ€ํ™” ๋“ฑ์˜ ๊ฐœ๋…์„ ๋‹ค๋ฃจ๋Š” ํ•™๋ฌธ์ด๋‹ค.
+๋„๋ฆฌ ๋ฐ›์•„๋“ค์—ฌ์ง€๋Š” ๋ช…ํ™•ํ•œ ์ •์˜๋Š” ์—†์œผ๋‚˜ ํ˜„๋Œ€ ์ˆ˜ํ•™์€ ์ผ๋ฐ˜์ ์œผ๋กœ ์—„๋ฐ€ํ•œ ๋…ผ๋ฆฌ์— ๊ทผ๊ฑฐํ•˜์—ฌ ์ถ”์ƒ์  ๋Œ€์ƒ์„ ํƒ๊ตฌํ•˜๋ฉฐ, ์ด๋Š” ๊ทœ์น™์˜ ๋ฐœ๊ฒฌ๊ณผ ๋ฌธ์ œ์˜ ์ œ์‹œ ๋ฐ ํ•ด๊ฒฐ์˜ ๊ณผ์ •์œผ๋กœ ์ด๋ฃจ์–ด์ง„๋‹ค.
+
+๋ฌธํ•™(ๆ–‡ๅญธ)์€ ์–ธ์–ด๋ฅผ ์˜ˆ์ˆ ์  ํ‘œํ˜„์˜ ์ œ์žฌ๋กœ ์‚ผ์•„ ์ƒˆ๋กœ์šด ์˜๋ฏธ๋ฅผ ์ฐฝ์ถœํ•˜์—ฌ, ์ธ๊ฐ„๊ณผ ์‚ฌํšŒ๋ฅผ ์ง„์‹ค๋˜๊ฒŒ ๋ฌ˜์‚ฌํ•˜๋Š” ์˜ˆ์ˆ ์˜ ํ•˜์œ„๋ถ„์•ผ์ด๋‹ค.
+๊ฐ„๋‹จํ•˜๊ฒŒ ์„ค๋ช…ํ•˜๋ฉด, ์–ธ์–ด๋ฅผ ํ†ตํ•ด ์ธ๊ฐ„์˜ ์‚ถ์„ ๋ฏธ์ (็พŽ็š„)์œผ๋กœ ํ˜•์ƒํ™”ํ•œ ๊ฒƒ์ด๋ผ๊ณ  ๋ณผ ์ˆ˜ ์žˆ๋‹ค.
+```
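+
+์•„๋ž˜๋Š” ์œ„ ํ˜•์‹์— ๋งž๋Š” `txt` ์ฝ”ํผ์Šค๋ฅผ ๋งŒ๋“œ๋Š” ์ฐธ๊ณ ์šฉ ์Šค์ผ€์น˜์ž…๋‹ˆ๋‹ค. (๋ณธ ์ €์žฅ์†Œ์˜ ์ฝ”๋“œ๊ฐ€ ์•„๋‹ˆ๋ฉฐ, ํŒŒ์ผ๋ช…๊ณผ ๋ฌธ์žฅ์€ ์ž„์˜์˜ ์˜ˆ์‹œ์ž…๋‹ˆ๋‹ค.)
+
+```python
+# ๋ฌธ์„œ ๋ณ„๋กœ ๋ฌธ์žฅ์„ ํ•œ ์ค„์”ฉ ์“ฐ๊ณ , ๋ฌธ์„œ ์‚ฌ์ด์—๋Š” ๋นˆ ์ค„์„ ๋„ฃ๋Š”๋‹ค.
+docs = [
+    ["์ฒซ ๋ฒˆ์งธ ๋ฌธ์„œ์˜ ์ฒซ ๋ฌธ์žฅ.", "์ฒซ ๋ฒˆ์งธ ๋ฌธ์„œ์˜ ๋‘ ๋ฒˆ์งธ ๋ฌธ์žฅ."],
+    ["๋‘ ๋ฒˆ์งธ ๋ฌธ์„œ์˜ ๋ฌธ์žฅ."],
+]
+with open("corpus.txt", "w", encoding="utf-8") as f:
+    f.write("\n\n".join("\n".join(doc) for doc in docs) + "\n")
+```
+
+์ด๋ ‡๊ฒŒ ๋งŒ๋“  ํŒŒ์ผ์€ ์œ„์˜ `build_tfrecord.sh` ๋‹จ๊ณ„์—์„œ ๊ทธ๋Œ€๋กœ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค.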
+
+#### Details
+
+- `BertTokenizer` ์‚ฌ์šฉ์„ ๊ฐ€์ •ํ•˜๊ณ  ์ฝ”๋“œ ์ž‘์„ฑ (๋งŒ์ผ ๋‹ค๋ฅธ tokenizer๋ฅผ ์‚ฌ์šฉํ•  ๊ฒฝ์šฐ ์ฝ”๋“œ๋ฅผ ์ง์ ‘ ์ˆ˜์ •ํ•ด์•ผ ํ•ฉ๋‹ˆ๋‹ค)
+- `whole word masking`, `max ngram masking` ์ ์šฉ ๊ฐ€๋Šฅ
+- `static masking` (dupe_factor๋ฅผ ํ†ตํ•ด ๋‹ค๋ฅธ ๋งˆ์Šคํ‚น์œผ๋กœ ๋ฐ์ดํ„ฐ๋ฅผ ๋ช‡ ๋ฐฐ๋กœ ๋งŒ๋“ค์ง€ ๊ฒฐ์ •)
+- `long_seq_threshold`๋ฅผ ํ†ตํ•ด example์„ ๋งŒ๋“ค ๋•Œ ๋„ˆ๋ฌด ๊ธด ๋ฌธ์žฅ์˜ ๊ฒฝ์šฐ ์—ฌ๋Ÿฌ ๊ฐœ์˜ example๋กœ ๋‚˜๋ˆ ์คŒ
+- `RoBERTa`์—์„œ ์‚ฌ์šฉํ•œ **full sentences** ๋ฐฉ์‹์œผ๋กœ example ์ƒ์„ฑ
+
+### 3. Pretraining with TPU
+
+- **TPU์˜ tensorflow version์€ `2.3.1`์„ ๊ถŒ์žฅ**
+- ๊ธฐ๋ณธ์ ์œผ๋กœ BERT์—์„œ Warm start ํ–ˆ์œผ๋ฉฐ, `position embedding (size 4096)`๋งŒ random initialize ์ฒ˜๋ฆฌ
+- `max_predictions_per_seq=640`์œผ๋กœ ์„ค์ • (์› ๋…ผ๋ฌธ์˜ 600๋ณด๋‹ค ํฌ๊ฒŒ ์„ค์ •)
+- `tokenizer`, `pretrain_config.json` ๋ชจ๋‘ `output_dir`์— ์ €์žฅํ•˜๋„๋ก ์ฒ˜๋ฆฌ (์ถ”ํ›„ huggingface transformers ํฌ๋งท์œผ๋กœ ๋ณ€ํ™˜ํ•  ๋•Œ ์‚ฌ์šฉ)
+
+#### How to run
+
+- Google Storage์— `tfrecord`์™€ `BERT checkpoint`(optional) ์—…๋กœ๋“œ
+
+```bash
+gsutil -m cp -r pretrain_tfrecords gs://$BUCKET_NAME  # tfrecord
+gsutil -m cp -r init_checkpoint gs://$BUCKET_NAME  # BERT tf v1 ckpt
+```
+
+- GCP cloud shell์—์„œ ์•„๋ž˜์˜ ๋ช…๋ น์–ด๋กœ instance์™€ TPU ์„ธํŒ…
+
+```bash
+ctpu up --zone=europe-west4-a --tf-version=2.3.1 --tpu-size=v3-8 --machine-type=n1-standard-1 --disk-size-gb=20 --name=$GCP_NAME --project=$PROJECT_NAME
+```
+
+- Instance์—์„œ ํ•™์Šต ์ง„ํ–‰
+
+  - BERT tf v1 checkpoint๋ฅผ warm start์— ์‚ฌ์šฉํ•  ์‹œ ์Šคํฌ๋ฆฝํŠธ์— ์•„๋ž˜์™€ ๊ฐ™์ด ์ธ์ž๋ฅผ ์ถ”๊ฐ€ํ•ด์ฃผ์„ธ์š”
+
+  ```bash
+  --init_checkpoint=gs://$BUCKET_NAME/init_checkpoint/model.ckpt-0
+  ```
+
+```bash
+cd pretrain
+pip3 install -r requirements.txt
+bash scripts/base_size_tpu.sh
+```
+
+## Convert Tensorflow checkpoint to Huggingface Transformers format
+
+```bash
+python3 convert_bigbird_tf_to_pt.py \
+    --checkpoint_dir $ORIG_TF_BERT_CKPT \
+    --big_bird_config_file $BIGBIRD_CONFIG_PATH \
+    --output_dir $PT_OUTPUT_DIR \
+    --tokenizer_dir $TOKENIZER_DIR \
+    --step_on_output
+```
+
+- `--big_bird_config_file`๋ฅผ ๋ช…์‹œํ•˜์ง€ ์•Š์œผ๋ฉด, script๊ฐ€ ์ž๋™์œผ๋กœ tensorflow checkpoint ์•ˆ์˜ `pretrain_config.json`์„ ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค.
+- `--tokenizer_dir`๋ฅผ ๋ช…์‹œํ•˜์ง€ ์•Š์œผ๋ฉด, script๊ฐ€ ์ž๋™์œผ๋กœ tensorflow checkpoint ์•ˆ์˜ `tokenizer`๋ฅผ ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค.
+
+## Reference
+
+- [Original BigBird implementation](https://github.com/google-research/bigbird)
+- [BERT tensorflow v1 implementation](https://github.com/google-research/bert)
+- [BERT tensorflow v2 implementation](https://github.com/tensorflow/models/tree/d4c5f8975a7b89f01421101882bc8922642c2314/official/nlp/bert)
+- [ELECTRA implementation](https://github.com/google-research/electra)
+- [Huggingface Transformers Documentation](https://huggingface.co/transformers/)
+- [ko-lm-dataformat](https://github.com/monologg/ko_lm_dataformat)

diff --git a/pretrain/README_EN.md b/pretrain/README_EN.md
new file mode 100644
index 0000000..8798df7
--- /dev/null
+++ b/pretrain/README_EN.md
@@ -0,0 +1,156 @@
+# Pretraining BigBird
+

+    <a href="README.md">ํ•œ๊ตญ์–ด</a> |
+    <b>English</b>
+

+
+## Differences & Improvements from existing BigBird code
+
+> NOTE: Refer to the [original bigbird github](https://github.com/google-research/bigbird) for the original BigBird code
+
+- **Warm start using BERT instead of RoBERTa**
+
+  - Use a newly pretrained BERT for the warm start
+
+- **Fix BERT checkpoint loading issues**
+
+  - Fix the issue where LMHead doesn't load properly by modifying the tf variable name (e.g. `transform` -> `transform/dense`)
+  - Skip loading the existing position embeddings of size 512
+
+- **Fix hard-coding issues**
+
+  - `MAX_SEQ_LEN=4096` was hard-coded
+  - The Sentencepiece tokenizer was mandatory
+  - Token ids were hard-coded to match the RoBERTa vocab
+
+- **Change not to use `Tensorflow Datasets (tfds)`**
+
+  - We tested several versions, but none worked properly.
+  - Instead, we added TFRecord builder code ([`create_pretraining_data.py`](./create_pretraining_data.py))
+
+## How to Pretrain
+
+### 0. (Optional) Prepare your own BERT
+
+- If you skip warm starting, you don't need a BERT checkpoint.
+- Refer to the [BERT Github](https://github.com/google-research/bert) if you want to build a BERT yourself.
+- Prepare a **tensorflow v1 checkpoint** laid out as below.
+
+```text
+init_checkpoint
+โ”œโ”€โ”€ bert_config.json
+โ”œโ”€โ”€ checkpoint
+โ”œโ”€โ”€ graph.pbtxt
+โ”œโ”€โ”€ model.ckpt-0.data-00000-of-00001
+โ”œโ”€โ”€ model.ckpt-0.index
+โ””โ”€โ”€ model.ckpt-0.meta
+```
+
+### 1. Prepare Tokenizer
+
+- Prepare a tokenizer which is compatible with `Huggingface Transformers`. (KoBigBird's [tokenizer](./tokenizer) is uploaded as a sample)
+- Add a `bos token (=<s>)` and an `eos token (=</s>)` to `BertTokenizer` for compatibility with `BigBirdTokenizer`
+
+### 2. Create TFRecord
+
+```bash
+bash scripts/build_tfrecord.sh
+```
+
+#### Prepare Corpus
+
+You can use either [`ko_lm_dataformat`](https://github.com/monologg/ko_lm_dataformat) or a plain `txt` file.
+
+- `ko_lm_dataformat` is assumed to yield documents that are already split into sentences
+
+```python
+for data in rdr.stream_data():
+    print(data)
+
+# data
+['์ œ์ž„์Šค ์–ผ ์นดํ„ฐ ์ฃผ๋‹ˆ์–ด(1924๋…„ 10์›” 1์ผ ~)๋Š” ๋ฏผ์ฃผ๋‹น ์ถœ์‹  ๋ฏธ๊ตญ 39๋Œ€ ๋Œ€ํ†ต๋ น (1977๋…„ ~ 1981๋…„)์ด๋‹ค.',
+ '์ง€๋ฏธ ์นดํ„ฐ๋Š” ์กฐ์ง€์•„์ฃผ ์„ฌํ„ฐ ์นด์šดํ‹ฐ ํ”Œ๋ ˆ์ธ์Šค ๋งˆ์„์—์„œ ํƒœ์–ด๋‚ฌ๋‹ค.',
+ '๊ทธ ํ›„ ํ•ด๊ตฐ์— ๋“ค์–ด๊ฐ€ ์ „ํ•จยท์›์ž๋ ฅยท์ž ์ˆ˜ํ•จ์˜ ์Šน๋ฌด์›์œผ๋กœ ์ผํ•˜์˜€๋‹ค.',
+ ...]
+```
+
+- For `txt`, an empty line is assumed between documents
+
+```text
+์ œ์ž„์Šค ์–ผ ์นดํ„ฐ ์ฃผ๋‹ˆ์–ด(1924๋…„ 10์›” 1์ผ ~)๋Š” ๋ฏผ์ฃผ๋‹น ์ถœ์‹  ๋ฏธ๊ตญ 39๋Œ€ ๋Œ€ํ†ต๋ น (1977๋…„ ~ 1981๋…„)์ด๋‹ค.
+์ง€๋ฏธ ์นดํ„ฐ๋Š” ์กฐ์ง€์•„์ฃผ ์„ฌํ„ฐ ์นด์šดํ‹ฐ ํ”Œ๋ ˆ์ธ์Šค ๋งˆ์„์—์„œ ํƒœ์–ด๋‚ฌ๋‹ค.
+
+์ˆ˜ํ•™(ๆ•ธๅญธ)์€ ์ˆ˜, ์–‘, ๊ตฌ์กฐ, ๊ณต๊ฐ„, ๋ณ€ํ™” ๋“ฑ์˜ ๊ฐœ๋…์„ ๋‹ค๋ฃจ๋Š” ํ•™๋ฌธ์ด๋‹ค.
+๋„๋ฆฌ ๋ฐ›์•„๋“ค์—ฌ์ง€๋Š” ๋ช…ํ™•ํ•œ ์ •์˜๋Š” ์—†์œผ๋‚˜ ํ˜„๋Œ€ ์ˆ˜ํ•™์€ ์ผ๋ฐ˜์ ์œผ๋กœ ์—„๋ฐ€ํ•œ ๋…ผ๋ฆฌ์— ๊ทผ๊ฑฐํ•˜์—ฌ ์ถ”์ƒ์  ๋Œ€์ƒ์„ ํƒ๊ตฌํ•˜๋ฉฐ, ์ด๋Š” ๊ทœ์น™์˜ ๋ฐœ๊ฒฌ๊ณผ ๋ฌธ์ œ์˜ ์ œ์‹œ ๋ฐ ํ•ด๊ฒฐ์˜ ๊ณผ์ •์œผ๋กœ ์ด๋ฃจ์–ด์ง„๋‹ค.
+
+๋ฌธํ•™(ๆ–‡ๅญธ)์€ ์–ธ์–ด๋ฅผ ์˜ˆ์ˆ ์  ํ‘œํ˜„์˜ ์ œ์žฌ๋กœ ์‚ผ์•„ ์ƒˆ๋กœ์šด ์˜๋ฏธ๋ฅผ ์ฐฝ์ถœํ•˜์—ฌ, ์ธ๊ฐ„๊ณผ ์‚ฌํšŒ๋ฅผ ์ง„์‹ค๋˜๊ฒŒ ๋ฌ˜์‚ฌํ•˜๋Š” ์˜ˆ์ˆ ์˜ ํ•˜์œ„๋ถ„์•ผ์ด๋‹ค.
+๊ฐ„๋‹จํ•˜๊ฒŒ ์„ค๋ช…ํ•˜๋ฉด, ์–ธ์–ด๋ฅผ ํ†ตํ•ด ์ธ๊ฐ„์˜ ์‚ถ์„ ๋ฏธ์ (็พŽ็š„)์œผ๋กœ ํ˜•์ƒํ™”ํ•œ ๊ฒƒ์ด๋ผ๊ณ  ๋ณผ ์ˆ˜ ์žˆ๋‹ค.
+```
+
+#### Details
+
+- The code assumes `BertTokenizer`. (If you use a different tokenizer, you need to modify the code yourself)
+- `whole word masking` and `max ngram masking` are available (see the sketch below)
+- `static masking` (`dupe_factor` decides how many copies of the data are created, each with a different masking)
+- `long_seq_threshold` splits a sequence that is too long into multiple examples
+- Create examples with **full sentences**, following the method used in `RoBERTa`
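+
+For example, with whole word masking all wordpieces of a chosen word are masked together. The sketch below is an illustration only, with hypothetical helper names, not code from this repo; the actual logic lives in [`create_pretraining_data.py`](./create_pretraining_data.py).
+
+```python
+import random
+
+def group_wordpieces(tokens):
+    """Group wordpiece indices into whole words ("##" marks continuations)."""
+    words = []
+    for i, tok in enumerate(tokens):
+        if tok.startswith("##") and words:
+            words[-1].append(i)
+        else:
+            words.append([i])
+    return words
+
+tokens = ["the", "dog", "##house", "is", "red"]
+words = group_wordpieces(tokens)  # [[0], [1, 2], [3], [4]]
+for i in random.choice(words):    # pick a whole word, not a single piece
+    tokens[i] = "[MASK]"          # may mask both "dog" and "##house"
+print(tokens)
+```
+
+Running this may print, e.g., `['the', '[MASK]', '[MASK]', 'is', 'red']`.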
+
+### 3. Pretraining with TPU
+
+- **We strongly recommend `tensorflow==2.3.1` on the TPU.**
+- Training is warm started from BERT, and only the `position embedding (size 4096)` is randomly initialized.
+- `max_predictions_per_seq=640` (set higher than the original paper's 600)
+- Both the `tokenizer` and `pretrain_config.json` are saved in `output_dir` (used later when converting to the huggingface transformers format)
+
+#### How to run
+
+- Upload the `tfrecord` and the `BERT checkpoint` (optional) to Google Storage
+
+```bash
+gsutil -m cp -r pretrain_tfrecords gs://$BUCKET_NAME  # tfrecord
+gsutil -m cp -r init_checkpoint gs://$BUCKET_NAME  # BERT tf v1 ckpt
+```
+
+- In the GCP cloud shell, set up the instance and TPU with the following command
+
+```bash
+ctpu up --zone=europe-west4-a --tf-version=2.3.1 --tpu-size=v3-8 --machine-type=n1-standard-1 --disk-size-gb=20 --name=$GCP_NAME --project=$PROJECT_NAME
+```
+
+- Run the training on the instance
+
+  - When using a BERT tf v1 checkpoint for the warm start, please add the following argument to the script.
+
+  ```bash
+  --init_checkpoint=gs://$BUCKET_NAME/init_checkpoint/model.ckpt-0
+  ```
+
+```bash
+cd pretrain
+pip3 install -r requirements.txt
+bash scripts/base_size_tpu.sh
+```
+
+## Convert Tensorflow checkpoint to Huggingface Transformers format
+
+```bash
+python3 convert_bigbird_tf_to_pt.py \
+    --checkpoint_dir $ORIG_TF_BERT_CKPT \
+    --big_bird_config_file $BIGBIRD_CONFIG_PATH \
+    --output_dir $PT_OUTPUT_DIR \
+    --tokenizer_dir $TOKENIZER_DIR \
+    --step_on_output
+```
+
+- If you don't specify `--big_bird_config_file`, the script will automatically use `pretrain_config.json` from the tensorflow checkpoint.
+- If `--tokenizer_dir` is not specified, the script will automatically use the `tokenizer` from the tensorflow checkpoint.
+
+## Reference
+
+- [Original BigBird implementation](https://github.com/google-research/bigbird)
+- [BERT tensorflow v1 implementation](https://github.com/google-research/bert)
+- [BERT tensorflow v2 implementation](https://github.com/tensorflow/models/tree/d4c5f8975a7b89f01421101882bc8922642c2314/official/nlp/bert)
+- [ELECTRA implementation](https://github.com/google-research/electra)
+- [Huggingface Transformers Documentation](https://huggingface.co/transformers/)
+- [ko-lm-dataformat](https://github.com/monologg/ko_lm_dataformat)

diff --git a/pretrain/convert_bigbird_tf_to_pt.py b/pretrain/convert_bigbird_tf_to_pt.py
new file mode 100644
index 0000000..dc4ee7e
--- /dev/null
+++ b/pretrain/convert_bigbird_tf_to_pt.py
@@ -0,0 +1,161 @@
+# coding=utf-8
+# Copyright 2021 The HuggingFace Inc. team.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Convert BigBird checkpoint.""" + + +import argparse +import json +import os + +import torch +import transformers +from transformers import AutoTokenizer, BigBirdConfig, BigBirdForPreTraining, load_tf_weights_in_big_bird +from transformers.utils import check_min_version, logging +from transformers.utils.versions import require_version + +logger = transformers.logging.get_logger() +logging.set_verbosity_info() +transformers.logging.enable_default_handler() +transformers.logging.enable_explicit_format() + +# NOTE check library version +require_version("torch>=1.6.0", "To fix: pip install torch>=1.6.0") +check_min_version("4.11.3") + +logger.warning("This script is tested on `transformers==4.11.3`. Might not work properly on other version!") + + +def load_bigbird_config(config_filepath): + """Load bigbird config from original tf checkpoint. (pretrain_config.json)""" + config_key_lst = [ + "attention_probs_dropout_prob", + "attention_type", + "block_size", + "pad_token_id", + "bos_token_id", + "eos_token_id", + "sep_token_id", + "gradient_checkpointing", # Originally `use_gradient_checkpointing` + "hidden_dropout_prob", + "hidden_size", + "initializer_range", + "intermediate_size", + "max_position_embeddings", + "num_attention_heads", + "num_hidden_layers", + "num_random_blocks", + "pad_token_id", + "rescale_embeddings", + "type_vocab_size", + "use_bias", + "vocab_size", + ] + + config = { + "position_embedding_type": "absolute", + "tokenizer_class": "BertTokenizer", # NOTE Remove this one if you use other tokenizer. + } + + with open(config_filepath, "r", encoding="utf-8") as f: + tf_config = json.load(f) + + for config_key in config_key_lst: + if config_key in tf_config: + config[config_key] = tf_config[config_key] + else: + if config_key == "gradient_checkpointing": + config[config_key] = tf_config["use_gradient_checkpointing"] + elif config_key == "num_random_blocks": + config[config_key] = tf_config["num_rand_blocks"] + elif config_key == "rescale_embeddings": + config[config_key] = tf_config["rescale_embedding"] + else: + raise KeyError(f"{config_key} not in tensorflow config!!") + + return dict(sorted(config.items())) + + +def convert_tf_checkpoint_to_pytorch(tf_checkpoint_path, big_bird_config_file, output_dir, tokenizer_path): + # Initialize PyTorch model + config = BigBirdConfig.from_dict(load_bigbird_config(os.path.join(tf_checkpoint_path, big_bird_config_file))) + print(f"Building PyTorch model from configuration: {config}") + + model = BigBirdForPreTraining(config) + + # Load weights from tf checkpoint + load_tf_weights_in_big_bird(model, tf_checkpoint_path, is_trivia_qa=False) + + # Save pytorch-model + print(f"Save PyTorch model to {output_dir}") + model.save_pretrained(output_dir) + + # NOTE Convert model which is compatible for torch<1.5 + pytorch_model = torch.load(os.path.join(output_dir, "pytorch_model.bin")) + torch.save( + pytorch_model, + os.path.join(args.output_dir, "pytorch_model.bin"), + _use_new_zipfile_serialization=False, + ) + + # Save tokenizer + tokenizer = AutoTokenizer.from_pretrained(tokenizer_path) + tokenizer.model_max_length = config.max_position_embeddings # 1024, 2048, 4096 + tokenizer.save_pretrained(output_dir) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + # Required parameters + parser.add_argument( + "--checkpoint_dir", default=None, type=str, required=True, help="Path to the TensorFlow checkpoint path." 
+ ) + parser.add_argument( + "--big_bird_config_file", + default="pretrain_config.json", + type=str, + help="The config json file corresponding to the pre-trained BigBird model. \n" + "This specifies the model architecture.", + ) + parser.add_argument("--output_dir", default=None, type=str, required=True, help="Path to the output PyTorch model.") + parser.add_argument( + "--tokenizer_dir", + default=None, + type=str, + help="Tokenizer path (include vocab.txt, tokenizer_config.json, special_tokens_map.json)\n" + "If not specified, converter will check tokenizer dir in tensorflow checkpoint", + ) + parser.add_argument( + "--step_on_output", + action="store_true", + help="Whether to write step on pytorch output\ne.g. kobigbird-bert-base-200k, kobigbird-bert-base-600k", + ) + args = parser.parse_args() + + # Write step on output_dir + if args.step_on_output: + with open(os.path.join(args.checkpoint_dir, "checkpoint"), "r", encoding="utf-8") as f: + line = f.readline() # read only first line & check with step of checkpoint to convert + step = line.split("-")[-1][:-1] + + if len(step) <= 4: + raise ValueError("Step should be bigger than 1k") + step = step[:-4] + + args.output_dir = args.output_dir + f"-{step}k" + + # Check tokenizer path (If tokenizer_dir is not specified, get tokenizer path from tf checkpoint dir) + tokenizer_path = args.tokenizer_dir if args.tokenizer_dir else os.path.join(args.checkpoint_dir, "tokenizer") + + convert_tf_checkpoint_to_pytorch(args.checkpoint_dir, args.big_bird_config_file, args.output_dir, tokenizer_path) diff --git a/pretrain/create_pretraining_data.py b/pretrain/create_pretraining_data.py new file mode 100644 index 0000000..c5af85d --- /dev/null +++ b/pretrain/create_pretraining_data.py @@ -0,0 +1,698 @@ +# coding=utf-8 +# Copyright 2020 The Google Research Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Writes out text data as tfrecords""" + +import argparse +import collections +import itertools +import multiprocessing +import os +import random +import sys +import time +from pathlib import Path + +import ko_lm_dataformat as kldf +import numpy as np +import tensorflow as tf +from transformers import AutoTokenizer + + +def printable_text(text): + """Returns text encoded in a way suitable for print or `tf.logging`.""" + + # These functions want `str` for both Python2 and Python3, but in one case + # it's a Unicode string and in the other it's a byte string. 
+ if isinstance(text, str): + return text + elif isinstance(text, bytes): + return text.decode("utf-8", "ignore") + else: + raise ValueError("Unsupported string type: %s" % (type(text))) + + +def create_int_feature(values): + feature = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values))) + return feature + + +def create_float_feature(values): + feature = tf.train.Feature(float_list=tf.train.FloatList(value=list(values))) + return feature + + +MaskedLmInstance = collections.namedtuple("MaskedLmInstance", ["index", "label"]) + +# A _Gram is a [half-open) interval of token indices which form a word. +# E.g., +# words: ["The", "doghouse"] +# tokens: ["The", "dog", "##house"] +# grams: [(0,1), (1,3)] +_Gram = collections.namedtuple("_Gram", ["begin", "end"]) + + +def _window(iterable, size): + """Helper to create a sliding window iterator with a given size. + E.g., + input = [1, 2, 3, 4] + _window(input, 1) => [1], [2], [3], [4] + _window(input, 2) => [1, 2], [2, 3], [3, 4] + _window(input, 3) => [1, 2, 3], [2, 3, 4] + _window(input, 4) => [1, 2, 3, 4] + _window(input, 5) => None + Args: + iterable: elements to iterate over. + size: size of the window. + Yields: + Elements of `iterable` batched into a sliding window of length `size`. + """ + i = iter(iterable) + window = [] + try: + for e in range(0, size): + window.append(next(i)) + yield window + except StopIteration: + # handle the case where iterable's length is less than the window size. + return + for e in i: + window = window[1:] + [e] + yield window + + +def _contiguous(sorted_grams): + """Test whether a sequence of grams is contiguous. + Args: + sorted_grams: _Grams which are sorted in increasing order. + Returns: + True if `sorted_grams` are touching each other. + E.g., + _contiguous([(1, 4), (4, 5), (5, 10)]) == True + _contiguous([(1, 2), (4, 5)]) == False + """ + for a, b in _window(sorted_grams, 2): + if a.end != b.begin: + return False + return True + + +def _wordpieces_to_grams(tokens, special_tokens): + """Reconstitue grams (words) from `tokens`. + E.g., + tokens: ['[CLS]', 'That', 'lit', '##tle', 'blue', 'tru', '##ck', '[SEP]'] + grams: [ [1,2), [2, 4), [4,5) , [5, 6)] + Args: + tokens: list of wordpieces + Returns: + List of _Grams representing spans of whole words + (without "[CLS]" and "[SEP]"). + """ + grams = [] + gram_start_pos = None + for i, token in enumerate(tokens): + if gram_start_pos is not None and token.startswith("##"): + continue + if gram_start_pos is not None: + grams.append(_Gram(gram_start_pos, i)) + if token not in special_tokens: + gram_start_pos = i + else: + gram_start_pos = None + if gram_start_pos is not None: + grams.append(_Gram(gram_start_pos, len(tokens))) + return grams + + +def _masking_ngrams(grams, max_ngram_size, max_masked_tokens): + """Create a list of masking {1, ..., n}-grams from a list of one-grams. + This is an extention of 'whole word masking' to mask multiple, contiguous + words such as (e.g., "the red boat"). + Each input gram represents the token indices of a single word, + words: ["the", "red", "boat"] + tokens: ["the", "red", "boa", "##t"] + grams: [(0,1), (1,2), (2,4)] + For a `max_ngram_size` of three, possible outputs masks include: + 1-grams: (0,1), (1,2), (2,4) + 2-grams: (0,2), (1,4) + 3-grams; (0,4) + Output masks will not overlap and contain less than `max_masked_tokens` total + tokens. 
E.g., for the example above with `max_masked_tokens` as three, + valid outputs are, + [(0,1), (1,2)] # "the", "red" covering two tokens + [(1,2), (2,4)] # "red", "boa", "##t" covering three tokens + The length of the selected n-gram follows a zipf weighting to + favor shorter n-gram sizes (weight(1)=1, weight(2)=1/2, weight(3)=1/3, ...). + Args: + grams: List of one-grams. + max_ngram_size: Maximum number of contiguous one-grams combined to create + an n-gram. + max_masked_tokens: Maximum total number of tokens to be masked. + Returns: + A list of n-grams to be used as masks. + """ + if not grams: + return None + + grams = sorted(grams) + num_tokens = grams[-1].end + + # Ensure our grams are valid (i.e., they don't overlap). + for a, b in _window(grams, 2): + if a.end > b.begin: + raise ValueError("overlapping grams: {}".format(grams)) + + # Build map from n-gram length to list of n-grams. + ngrams = {i: [] for i in range(1, max_ngram_size + 1)} + for gram_size in range(1, max_ngram_size + 1): + for g in _window(grams, gram_size): + if _contiguous(g): + # Add an n-gram which spans these one-grams. + ngrams[gram_size].append(_Gram(g[0].begin, g[-1].end)) + + # Shuffle each list of n-grams. + for v in ngrams.values(): + random.shuffle(v) + + # Create the weighting for n-gram length selection. + # Stored cummulatively for `random.choices` below. + cummulative_weights = list(itertools.accumulate([1.0 / n for n in range(1, max_ngram_size + 1)])) + + output_ngrams = [] + # Keep a bitmask of which tokens have been masked. + masked_tokens = [False] * num_tokens + # Loop until we have enough masked tokens or there are no more candidate + # n-grams of any length. + # Each code path should ensure one or more elements from `ngrams` are removed + # to guarentee this loop terminates. + while sum(masked_tokens) < max_masked_tokens and sum(len(s) for s in ngrams.values()): + # Pick an n-gram size based on our weights. + sz = random.choices(range(1, max_ngram_size + 1), cum_weights=cummulative_weights)[0] + + # Ensure this size doesn't result in too many masked tokens. + # E.g., a two-gram contains _at least_ two tokens. + if sum(masked_tokens) + sz > max_masked_tokens: + # All n-grams of this length are too long and can be removed from + # consideration. + ngrams[sz].clear() + continue + + # All of the n-grams of this size have been used. + if not ngrams[sz]: + continue + + # Choose a random n-gram of the given size. + gram = ngrams[sz].pop() + num_gram_tokens = gram.end - gram.begin + + # Check if this would add too many tokens. + if num_gram_tokens + sum(masked_tokens) > max_masked_tokens: + continue + + # Check if any of the tokens in this gram have already been masked. + if sum(masked_tokens[gram.begin : gram.end]): + continue + + # Found a usable n-gram! Mark its tokens as masked and add it to return. + masked_tokens[gram.begin : gram.end] = [True] * (gram.end - gram.begin) + output_ngrams.append(gram) + return output_ngrams + + +def create_masked_lm_predictions( + tokens, + masked_lm_prob, + max_predictions_per_seq, + vocab_words, + do_whole_word_mask, + tokenizer, + max_ngram_size=None, +): + """Creates the predictions for the masked LM objective.""" + if do_whole_word_mask: + grams = _wordpieces_to_grams(tokens=tokens, special_tokens=tokenizer.special_tokens_map.values()) + else: + # Here we consider each token to be a word to allow for sub-word masking. 
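+        # (n-gram masks are assembled from whole-word grams, so n-gram masking
+        # cannot be combined with token-level grams; hence the check below.)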
+ if max_ngram_size: + raise ValueError("cannot use ngram masking without whole word masking") + grams = [ + _Gram(i, i + 1) for i in range(0, len(tokens)) if tokens[i] not in tokenizer.special_tokens_map.values() + ] + + num_to_predict = min(max_predictions_per_seq, max(1, int(round(len(tokens) * masked_lm_prob)))) + # Generate masks. If `max_ngram_size` in [0, None] it means we're doing + # whole word masking or token level masking. Both of these can be treated + # as the `max_ngram_size=1` case. + masked_grams = _masking_ngrams(grams, max_ngram_size or 1, num_to_predict) + masked_lms = [] + output_tokens = list(tokens) + for gram in masked_grams: + # 80% of the time, replace all n-gram tokens with [MASK] + if random.random() < 0.8: + replacement_action = lambda idx: tokenizer.mask_token + else: + # 10% of the time, keep all the original n-gram tokens. + if random.random() < 0.5: + replacement_action = lambda idx: tokens[idx] + # 10% of the time, replace each n-gram token with a random word. + else: + replacement_action = lambda idx: random.choice(vocab_words) + + for idx in range(gram.begin, gram.end): + output_tokens[idx] = replacement_action(idx) + masked_lms.append(MaskedLmInstance(index=idx, label=tokens[idx])) + + assert len(masked_lms) <= num_to_predict + masked_lms = sorted(masked_lms, key=lambda x: x.index) + + masked_lm_positions = [] + masked_lm_labels = [] + for p in masked_lms: + masked_lm_positions.append(p.index) + masked_lm_labels.append(p.label) + + return (output_tokens, masked_lm_positions, masked_lm_labels) + + +class ExampleBuilder(object): + """Given a stream of input text, creates pretraining examples.""" + + def __init__(self, args, tokenizer): + self.args = args + self._tokenizer = tokenizer + self._current_sentences = [] + self._current_length = 0 + self._max_length = args.max_seq_length + self._long_seq_length_limit = int(self._max_length * args.long_seq_threshold) + self._target_length = self._max_length + self.n_build = 0 + self.vocab_for_random_replacement = [] + + # NOTE This will except special tokens (e.g. 
[CLS], [unused0], ) + for token in self._tokenizer.vocab.keys(): + if not (token.startswith("[unused") and token.endswith("]")): + self.vocab_for_random_replacement.append(token) + + for special_token in self._tokenizer.special_tokens_map.values(): + if special_token not in self.vocab_for_random_replacement: + self.vocab_for_random_replacement.append(special_token) + + def add_line(self, line): + """Adds a line of text to the current example being built.""" + line = line.strip().replace("\n", " ") # NOTE BertTokenizer will cover whitespace cleaning + if not line: # NOTE If it is empty string + return None + bert_tokens = self._tokenizer.tokenize(line) + self._current_sentences.append(bert_tokens) + self._current_length += len(bert_tokens) + if self._current_length >= self._target_length: + return self._create_example() + return None + + def _create_example(self): + """Creates a pre-training example from the current list of sentences.""" + # small chance to have two segment + if random.random() < self.args.sentence_pair_prob: + # -3 due to not yet having [CLS]/[SEP] tokens in the input text + first_segment_target_length = (self._target_length - 3) // 2 + else: + # NOTE It will be only one segment for BigBird + first_segment_target_length = 100000 + + first_segment = [] + second_segment = [] + for sentence in self._current_sentences: + # the sentence goes to the first segment if (1) the first segment is + # empty, (2) the sentence doesn't put the first segment over length or + # (3) 50% of the time when it does put the first segment over length + if ( + len(first_segment) == 0 + or len(first_segment) + len(sentence) < first_segment_target_length + or ( + len(second_segment) == 0 + and len(first_segment) < first_segment_target_length + and random.random() < 0.5 + ) + ): + first_segment += sentence + else: + second_segment += sentence + + example_lst = [] + all_segment_len = len(first_segment) + len(second_segment) + + # NOTE + # If `first_seg + second_seg` is too long, we will make them to multiple chunk of single sentence + if all_segment_len >= self._long_seq_length_limit: + all_segment = first_segment + second_segment + + seq_len_for_example = self._max_length - 2 # NOTE -2 for [CLS]/[SEP] + for i in range(0, all_segment_len, seq_len_for_example): + example_lst.append( + self._make_tf_example( + first_segment=all_segment[i : i + seq_len_for_example], + second_segment=None, + ) + ) + else: + # trim to max_length while accounting for not-yet-added [CLS]/[SEP] tokens + first_segment = first_segment[: self._max_length - 2] + second_segment = second_segment[: max(0, self._max_length - len(first_segment) - 3)] + # NOTE Put it in List as batch size 1 + example_lst.append(self._make_tf_example(first_segment, second_segment)) + + # prepare to start building the next example + self._current_sentences = [] + self._current_length = 0 + # small chance for random-length instead of max_length-length example + if random.random() < self.args.short_seq_prob: + self._target_length = random.randint(5, self._max_length) + else: + self._target_length = self._max_length + + return example_lst + + def _make_tf_example(self, first_segment, second_segment): + """Converts two "segments" of text into a tf.train.Example.""" + tokens = [self._tokenizer.cls_token] + first_segment + [self._tokenizer.sep_token] + segment_ids = [0] * len(tokens) + if second_segment: + tokens += second_segment + [self._tokenizer.sep_token] + segment_ids += [1] * (len(second_segment) + 1) + + # Masking + (tokens, masked_lm_positions, 
masked_lm_labels) = create_masked_lm_predictions( + tokens=tokens, + masked_lm_prob=self.args.masked_lm_prob, + max_predictions_per_seq=self.args.max_predictions_per_seq, + vocab_words=self.vocab_for_random_replacement, + do_whole_word_mask=self.args.do_whole_word_mask, + tokenizer=self._tokenizer, + max_ngram_size=self.args.max_ngram_size, + ) + # tokens -> input_ids + input_ids = self._tokenizer.convert_tokens_to_ids(tokens) + + # Padding + input_ids += [self._tokenizer.pad_token_id] * (self._max_length - len(tokens)) + segment_ids += [0] * (self._max_length - len(segment_ids)) + + masked_lm_positions = list(masked_lm_positions) + masked_lm_ids = self._tokenizer.convert_tokens_to_ids(masked_lm_labels) + masked_lm_weights = [1.0] * len(masked_lm_ids) + + while len(masked_lm_positions) < self.args.max_predictions_per_seq: + masked_lm_positions.append(0) + masked_lm_ids.append(self._tokenizer.pad_token_id) + masked_lm_weights.append(0.0) + + assert len(input_ids) == self._max_length + assert len(segment_ids) == self._max_length + assert len(masked_lm_positions) == self.args.max_predictions_per_seq + assert len(masked_lm_ids) == self.args.max_predictions_per_seq + assert len(masked_lm_weights) == self.args.max_predictions_per_seq + + features = collections.OrderedDict() + features["input_ids"] = create_int_feature(input_ids) + features["segment_ids"] = create_int_feature(segment_ids) + features["masked_lm_positions"] = create_int_feature(masked_lm_positions) + features["masked_lm_ids"] = create_int_feature(masked_lm_ids) + features["masked_lm_weights"] = create_float_feature(masked_lm_weights) + features["next_sentence_labels"] = create_int_feature([0]) # NOTE Dummy value + + self.n_build += 1 + + if self.args.debug and self.n_build < 3: + tf.compat.v1.logging.info("*** Example ***") + tf.compat.v1.logging.info("tokens: %s", " ".join([printable_text(x) for x in tokens])) + + for feature_name in features.keys(): + feature = features[feature_name] + values = [] + if feature.int64_list.value: + values = feature.int64_list.value + elif feature.float_list.value: + values = feature.float_list.value + tf.compat.v1.logging.info("%s: %s", feature_name, " ".join([str(x) for x in values])) + + tf_example = tf.train.Example(features=tf.train.Features(feature=features)) + return tf_example + + +class ExampleWriter(object): + """Writes pre-training examples to disk.""" + + def __init__( + self, + args, + job_id, + num_out_files=1000, + ): + self.tokenizer_dir = args.tokenizer_dir + self.output_dir = args.output_dir + self.max_seq_length = args.max_seq_length + self.num_jobs = args.num_processes + + tokenizer = AutoTokenizer.from_pretrained(self.tokenizer_dir, use_fast=True) + self._example_builder = ExampleBuilder(args, tokenizer) + self._writers = [] + for i in range(num_out_files): + if i % self.num_jobs == job_id: + output_fname = os.path.join( + self.output_dir, + "pretrain_data.tfrecord-{:}-of-{:}".format(i, num_out_files), + ) + self._writers.append(tf.io.TFRecordWriter(output_fname)) + self.n_written = 0 + + def write_examples(self, input_file): + """Writes out examples from the provided input file.""" + if input_file.endswith(".txt"): + self.write_examples_from_txt(input_file) + elif input_file.endswith(".jsonl.zst"): + self.write_examples_from_kldf(input_file) + else: + print(f"{input_file} is not supported. 
Only supports `jsonl.zst` or `txt` file") + + # NOTE Flush + if self._example_builder._current_length != 0: + example_lst = self._example_builder._create_example() + if example_lst: + for example in example_lst: + self._writers[self.n_written % len(self._writers)].write(example.SerializeToString()) + self.n_written += 1 + + def write_examples_from_txt(self, input_file): + with open(input_file, "r", encoding="utf-8") as f: + for line in f: + line = line.strip() + if line: + example_lst = self._example_builder.add_line(line) + if example_lst: + for example in example_lst: + self._writers[self.n_written % len(self._writers)].write(example.SerializeToString()) + self.n_written += 1 + + def write_examples_from_kldf(self, input_file): + rdr = kldf.Reader(input_file) + for doc in rdr.stream_data(get_meta=False, threaded=False): + if type(doc) == str: + doc = [doc] + + for line in doc: + line = line.strip() + if line: + example_lst = self._example_builder.add_line(line) + if example_lst: + for example in example_lst: + self._writers[self.n_written % len(self._writers)].write(example.SerializeToString()) + self.n_written += 1 + + def finish(self): + for writer in self._writers: + writer.close() + + +def write_examples(fnames, job_id, args): + """A single process creating and writing out pre-processed examples.""" + + def log(*args): + msg = " ".join(map(str, args)) + print("Job {}:".format(job_id), msg) + + log("Creating example writer") + example_writer = ExampleWriter(args=args, job_id=job_id) + log("Writing tf examples") + fnames = [f for (i, f) in enumerate(fnames) if i % args.num_processes == job_id] + + # https://pytorch.org/docs/stable/data.html#data-loading-randomness + seed_worker(job_id, args) + + random.shuffle(fnames) + start_time = time.time() + # Add dupe factor + file_processed = 0 + for dupe_idx in range(args.dupe_factor): + for file_no, fname in enumerate(fnames): + if file_processed > 0: + elapsed = time.time() - start_time + log( + "Processed dupe {:}, {:}/{:} files ({:.1f}%), " + "{:} examples written, (ELAPSED: {:}s, ETA: {:}s)".format( + dupe_idx, + file_no, + len(fnames), + 100.0 * file_no / len(fnames), + example_writer.n_written, + int(elapsed), + int((len(fnames) * args.dupe_factor - file_processed) / (file_processed / elapsed)), + ) + ) + example_writer.write_examples(fname) + file_processed += 1 + example_writer.finish() + log("Done!") + + +def log_config(config): + def log(*args): + msg = ": ".join(map(str, args)) + sys.stdout.write(msg + "\n") + sys.stdout.flush() + + for key, value in sorted(config.__dict__.items()): + log(key, value) + log() + + +def seed_worker(job_id, args): + worker_seed = (args.seed + job_id) % 2 ** 32 + np.random.seed(worker_seed) + random.seed(worker_seed) + + +def get_files(input_dir): + """ + Get all files from input directory. 
+ ONLY support `jsonl.zst` and `txt` (kldf format or plain text) + """ + filetypes = ["jsonl.zst", "txt"] + files = [list(Path(input_dir).rglob(f"*.{ft}")) for ft in filetypes] + # flatten list of list -> list and stringify Paths + flattened_list = [str(item) for sublist in files for item in sublist] + if not flattened_list: + raise Exception( + f"""did not find any files at this path {input_dir}, please also ensure your files are in format {filetypes}""" + ) + flattened_list = sorted(flattened_list) + return flattened_list + + +def main(): + tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO) + + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument("--input_dir", required=True, help="Location of text or ko_lm_dataformat files.") + parser.add_argument("--tokenizer_dir", required=True, help="Location of tokenizer directory.") + parser.add_argument("--output_dir", required=True, help="Where to write out the tfrecords.") + parser.add_argument("--max_seq_length", default=512, type=int, help="Number of tokens per example.") + parser.add_argument( + "--max_predictions_per_seq", + default=76, + type=int, + help="Maximum number of masked LM predictions per sequence.", + ) + parser.add_argument( + "--num_processes", + default=0, + type=int, + help="Parallelize across multiple processes. 0 will set the number of detected logical CPUs.", + ) + parser.add_argument( + "--sentence_pair_prob", + default=0.05, + type=float, + help="Probability for make input with sentence pair ([CLS] text_a [SEP] text_b [SEP])", + ) + parser.add_argument( + "--short_seq_prob", + default=0.01, + type=float, + help="Probability of creating sequences which are shorter than the maximum length.", + ) + parser.add_argument( + "--do_whole_word_mask", + action="store_true", + help="Whether to use whole word masking rather than per-WordPiece masking.", + ) + parser.add_argument( + "--max_ngram_size", + type=int, + default=None, + help="Mask contiguous whole words (n-grams) of up to `max_ngram_size` using a weighting scheme to favor shorter n-grams. " + "Note: `--do_whole_word_mask=True` must also be set when n-gram masking.", + ) + parser.add_argument( + "--dupe_factor", + type=int, + default=2, + help="Number of times to duplicate the input data (with different masks).", + ) + parser.add_argument("--masked_lm_prob", type=float, default=0.15, help="Masked LM probability.") + parser.add_argument( + "--debug", + action="store_true", + help="Debug the result of tokenization, masking etc.", + ) + parser.add_argument("--seed", default=12345, type=int, help="Random seed for data generation.") + parser.add_argument( + "--long_seq_threshold", + default=1.8, + type=float, + help="Threshold for extremely long sequence. 
" + "If sequence >= int(max_seq_len * threshold), split the sequence into multiple chunk", + ) + + args = parser.parse_args() + + assert args.long_seq_threshold > 1.0 + + log_config(args) + + if tf.io.gfile.exists(args.output_dir): + tf.io.gfile.rmtree(args.output_dir) + if not tf.io.gfile.exists(args.output_dir): + tf.io.gfile.makedirs(args.output_dir) + + fnames = get_files(args.input_dir) + print(f"Total number of files: {len(fnames)}") + + if args.num_processes == 1: + write_examples(fnames, 0, args) + else: + if args.num_processes == 0: + args.num_processes = multiprocessing.cpu_count() + + jobs = [] + for i in range(args.num_processes): + job = multiprocessing.Process(target=write_examples, args=(fnames, i, args)) + jobs.append(job) + job.start() + for job in jobs: + job.join() + + +if __name__ == "__main__": + main() diff --git a/pretrain/kobigbird/__init__.py b/pretrain/kobigbird/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pretrain/kobigbird/attention.py b/pretrain/kobigbird/attention.py new file mode 100644 index 0000000..e0ec0eb --- /dev/null +++ b/pretrain/kobigbird/attention.py @@ -0,0 +1,1062 @@ +# Copyright 2021 The BigBird Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""BigBird Attention Layers.""" + +import numpy as np +import tensorflow.compat.v2 as tf +from absl import logging +from kobigbird import recompute_grad, utils + + +def get_single_block_row_attention( + block_id, + to_start_block_id, + to_end_block_id, + num_rand_blocks, + window_block_left=1, + window_block_right=1, + global_block_left=1, + global_block_right=1, +): + """For a single row block get random row attention. + + Args: + block_id: int. block id of row. + to_start_block_id: int. random attention coloum start id. + to_end_block_id: int. random attention coloum end id. + num_rand_blocks: int. number of random blocks to be selected. + window_block_left: int. number of blocks of window to left of a block. + window_block_right: int. number of blocks of window to right of a block. + global_block_left: int. Number of blocks globally used to the left. + global_block_right: int. Number of blocks globally used to the right. + + Returns: + row containing the random attention vector of size num_rand_blocks. 
+    """
+
+    # list of to_blocks from which to choose random attention
+    to_block_list = np.arange(to_start_block_id, to_end_block_id, dtype=np.int32)
+    # permute the blocks
+    perm_block = np.random.permutation(to_block_list)
+
+    # illegal blocks for the current block id, using window
+    illegal_blocks = list(range(block_id - window_block_left, block_id + window_block_right + 1))
+
+    # Add blocks at the start and at the end
+    illegal_blocks.extend(list(range(global_block_left)))
+    illegal_blocks.extend(list(range(to_end_block_id - global_block_right, to_end_block_id)))
+
+    # The second from_block cannot choose random attention on the second last to_block
+    if block_id == 1:
+        illegal_blocks.append(to_end_block_id - 2)
+
+    # The second last from_block cannot choose random attention on the second to_block
+    if block_id == to_end_block_id - 2:
+        illegal_blocks.append(1)
+
+    selected_random_blocks = []
+
+    for i in range(to_end_block_id - to_start_block_id):
+        if perm_block[i] not in illegal_blocks:
+            selected_random_blocks.append(perm_block[i])
+        if len(selected_random_blocks) == num_rand_blocks:
+            break
+    return np.array(selected_random_blocks, dtype=np.int32)
+
+
+def bigbird_block_rand_mask_with_head(
+    seq_length,
+    block_size,
+    num_heads,
+    plan_from_length,
+    plan_num_rand_blocks,
+    window_block_left=1,
+    window_block_right=1,
+    global_block_top=1,
+    global_block_bottom=1,
+    global_block_left=1,
+    global_block_right=1,
+):
+    """Create adjacency list of random attention.
+
+    Args:
+      seq_length: int. length of sequence.
+      block_size: int. size of block in sequence.
+      num_heads: int. total number of heads.
+      plan_from_length: list. plan from length where num_rand are chosen from.
+      plan_num_rand_blocks: list. number of rand blocks within the plan.
+      window_block_left: int. number of blocks of window to left of a block.
+      window_block_right: int. number of blocks of window to right of a block.
+      global_block_top: int. number of blocks at the top.
+      global_block_bottom: int. number of blocks at the bottom.
+      global_block_left: int. Number of blocks globally used to the left.
+      global_block_right: int. Number of blocks globally used to the right.
+ + Returns: + adjacency list of size num_head where each element is of size + from_seq_length//from_block_size-2 by num_rand_blocks + """ + # Total number of blocks in the mmask + num_blocks = seq_length // block_size + # Number of blocks per plan + plan_block_length = np.array(plan_from_length) // block_size + # till when to follow plan + max_plan_idx = plan_from_length.index(seq_length) + # Random Attention adjajency list + rand_attn = [ + np.zeros( + (num_blocks, np.sum(plan_num_rand_blocks[: max_plan_idx + 1])), + dtype=np.int32, + ) + for i in range(num_heads) + ] + + # We will go iteratively over the plan blocks and pick random number of + # Attention blocks from the legally allowed blocks + for plan_idx in range(max_plan_idx + 1): + rnd_r_cnt = 0 + if plan_idx > 0: + # set the row for all from_blocks starting from 0 to + # plan_block_length[plan_idx-1] + # column indx start fromm plan_block_length[plan_idx-1] and ends at + # plan_block_length[plan_idx] + if plan_num_rand_blocks[plan_idx] > 0: + rnd_r_cnt = int(np.sum(plan_num_rand_blocks[:plan_idx])) + curr_r_cnt = int(np.sum(plan_num_rand_blocks[: plan_idx + 1])) + for blk_rw_idx in range(global_block_top, plan_block_length[plan_idx - 1]): + for h in range(num_heads): + # print("head", h, "blk_rw_idx", blk_rw_idx) + rand_attn[h][blk_rw_idx, rnd_r_cnt:curr_r_cnt] = get_single_block_row_attention( + block_id=blk_rw_idx, + to_start_block_id=plan_block_length[plan_idx - 1], + to_end_block_id=plan_block_length[plan_idx], + num_rand_blocks=plan_num_rand_blocks[plan_idx], + window_block_left=window_block_left, + window_block_right=window_block_right, + global_block_left=global_block_left, + global_block_right=global_block_right, + ) + + for pl_id in range(plan_idx): + if plan_num_rand_blocks[pl_id] == 0: + continue + for blk_rw_idx in range(plan_block_length[plan_idx - 1], plan_block_length[plan_idx]): + rnd_r_cnt = 0 + to_start_block_id = 0 + if pl_id > 0: + rnd_r_cnt = int(np.sum(plan_num_rand_blocks[:pl_id])) + to_start_block_id = plan_block_length[pl_id - 1] + curr_r_cnt = int(np.sum(plan_num_rand_blocks[: pl_id + 1])) + for h in range(num_heads): + # print("head", h, "blk_rw_idx", blk_rw_idx) + rand_attn[h][blk_rw_idx, rnd_r_cnt:curr_r_cnt] = get_single_block_row_attention( + block_id=blk_rw_idx, + to_start_block_id=to_start_block_id, + to_end_block_id=plan_block_length[pl_id], + num_rand_blocks=plan_num_rand_blocks[pl_id], + window_block_left=window_block_left, + window_block_right=window_block_right, + global_block_left=global_block_left, + global_block_right=global_block_right, + ) + + if plan_num_rand_blocks[plan_idx] == 0: + continue + # print("Start from here") + curr_r_cnt = int(np.sum(plan_num_rand_blocks[: plan_idx + 1])) + from_start_block_id = global_block_top + to_start_block_id = 0 + if plan_idx > 0: + rnd_r_cnt = int(np.sum(plan_num_rand_blocks[:plan_idx])) + from_start_block_id = plan_block_length[plan_idx - 1] + to_start_block_id = plan_block_length[plan_idx - 1] + + for blk_rw_idx in range(from_start_block_id, plan_block_length[plan_idx]): + for h in range(num_heads): + # print("head", h, "blk_rw_idx", blk_rw_idx) + rand_attn[h][blk_rw_idx, rnd_r_cnt:curr_r_cnt] = get_single_block_row_attention( + block_id=blk_rw_idx, + to_start_block_id=to_start_block_id, + to_end_block_id=plan_block_length[plan_idx], + num_rand_blocks=plan_num_rand_blocks[plan_idx], + window_block_left=window_block_left, + window_block_right=window_block_right, + global_block_left=global_block_left, + global_block_right=global_block_right, + ) + + 
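+    # Trim the top/bottom global rows: only the middle blocks keep an explicit
+    # random-attention adjacency list.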
    for nh in range(num_heads):
+        rand_attn[nh] = rand_attn[nh][global_block_top : num_blocks - global_block_bottom, :]
+    return rand_attn
+
+
+def get_rand_attn_plan(from_seq_length, from_block_size, num_rand_blocks):
+    """Gives the plan of where to put random attention.
+
+    Args:
+      from_seq_length: int. length of from sequence.
+      from_block_size: int. size of block in from sequence.
+      num_rand_blocks: int. Number of random chunks per row.
+
+    Returns:
+      plan_from_length: ending location of from block
+      plan_num_rand_blocks: number of random ending location for each block
+    """
+    # general plan
+    plan_from_length = []
+    plan_num_rand_blocks = []
+    if (2 * num_rand_blocks + 5) < (from_seq_length // from_block_size):
+        plan_from_length.append(int((2 * num_rand_blocks + 5) * from_block_size))
+        plan_num_rand_blocks.append(num_rand_blocks)
+        plan_from_length.append(from_seq_length)
+        plan_num_rand_blocks.append(0)
+    elif (num_rand_blocks + 5) < (from_seq_length // from_block_size):
+        plan_from_length.append(int((num_rand_blocks + 5) * from_block_size))
+        plan_num_rand_blocks.append(num_rand_blocks // 2)
+        plan_from_length.append(from_seq_length)
+        plan_num_rand_blocks.append(num_rand_blocks - (num_rand_blocks // 2))
+    else:
+        plan_from_length.append(from_seq_length)
+        plan_num_rand_blocks.append(num_rand_blocks)
+
+    return plan_from_length, plan_num_rand_blocks
+
+
+def bigbird_block_rand_mask(
+    from_seq_length,
+    to_seq_length,
+    from_block_size,
+    to_block_size,
+    num_rand_blocks,
+    last_idx=-1,
+):
+    """Create adjacency list of random attention.
+
+    Args:
+      from_seq_length: int. length of from sequence.
+      to_seq_length: int. length of to sequence.
+      from_block_size: int. size of block in from sequence.
+      to_block_size: int. size of block in to sequence.
+      num_rand_blocks: int. Number of random chunks per row.
+      last_idx: if -1 then num_rand_blocks blocks are chosen anywhere in the to sequence,
+        if positive then num_rand_blocks blocks are chosen only up to last_idx.
+    """
+    rand_attn = np.zeros((from_seq_length // from_block_size - 2, num_rand_blocks), dtype=np.int32)
+    middle_seq = np.arange(1, to_seq_length // to_block_size - 1, dtype=np.int32)
+    last = to_seq_length // to_block_size - 1
+    if last_idx > (2 * to_block_size):
+        last = (last_idx // to_block_size) - 1
+
+    r = num_rand_blocks  # shorthand
+    for i in range(1, from_seq_length // from_block_size - 1):
+        start = i - 2
+        end = i
+        if i == 1:
+            rand_attn[i - 1, :] = np.random.permutation(middle_seq[2:last])[:r]
+        elif i == 2:
+            rand_attn[i - 1, :] = np.random.permutation(middle_seq[3:last])[:r]
+        elif i == from_seq_length // from_block_size - 3:
+            rand_attn[i - 1, :] = np.random.permutation(middle_seq[:last])[:r]
+            # Missing -3: should have been sliced till last-3
+        elif i == from_seq_length // from_block_size - 2:
+            rand_attn[i - 1, :] = np.random.permutation(middle_seq[:last])[:r]
+            # Missing -4: should have been sliced till last-4
+        else:
+            if start > last:
+                start = last
+                rand_attn[i - 1, :] = np.random.permutation(middle_seq[:start])[:r]
+            elif (end + 1) == last:
+                rand_attn[i - 1, :] = np.random.permutation(middle_seq[:start])[:r]
+            else:
+                rand_attn[i - 1, :] = np.random.permutation(
+                    np.concatenate((middle_seq[:start], middle_seq[end + 1 : last]))
+                )[:r]
+    return rand_attn
+
+
+def full_bigbird_mask(max_seq_length, from_seq_length, to_seq_length, from_block_size, to_block_size, rand_attn):
+    """Calculate BigBird attention pattern as a full dense matrix.
+
+    Args:
+      max_seq_length: int. max sequence length of model
+      from_seq_length: int. length of from sequence.
+      to_seq_length: int. length of to sequence.
+      from_block_size: int. size of block in from sequence.
+      to_block_size: int. size of block in to sequence.
+      rand_attn: adjacency matrix for random attention.
+
+    Returns:
+      attention mask matrix of shape [from_seq_length, to_seq_length]
+    """
+
+    attn_mask = np.zeros((max_seq_length, max_seq_length), dtype=np.int32)
+    for i in range(1, (max_seq_length // from_block_size) - 1):
+        attn_mask[
+            (i) * from_block_size : (i + 1) * from_block_size,
+            (i - 1) * to_block_size : (i + 2) * to_block_size,
+        ] = 1
+        for j in rand_attn[i - 1, :]:
+            attn_mask[
+                i * from_block_size : (i + 1) * from_block_size,
+                j * to_block_size : (j + 1) * to_block_size,
+            ] = 1
+
+    attn_mask[:from_block_size, :] = 1
+    attn_mask[:, :to_block_size] = 1
+    attn_mask[:, -to_block_size:] = 1
+    attn_mask[-from_block_size:, :] = 1
+    clipped_attn_mask = attn_mask[:from_seq_length, :to_seq_length]
+    return np.array(clipped_attn_mask, dtype=bool)
+
+
+def create_rand_mask_from_inputs(
+    from_blocked_mask,
+    to_blocked_mask,
+    rand_attn,
+    num_attention_heads,
+    num_rand_blocks,
+    from_seq_length,
+    from_block_size,
+):
+    """Create a 5D attention mask from 3D blocked tensor masks.
+
+    Args:
+      from_blocked_mask: 3D Tensor of shape [batch_size,
+        from_seq_length//from_block_size, from_block_size].
+      to_blocked_mask: int32 Tensor of shape [batch_size,
+        to_seq_length//to_block_size, to_block_size].
+      rand_attn: [batch_size, num_attention_heads,
+        from_seq_length//from_block_size-2, num_rand_blocks]
+      num_attention_heads: int. Number of attention heads.
+      num_rand_blocks: int. Number of random chunks per row.
+      from_seq_length: int. length of from sequence.
+      from_block_size: int. size of block in from sequence.
+
+    Returns:
+      float Tensor of shape [batch_size, num_attention_heads,
+        from_seq_length//from_block_size-2,
+        from_block_size, num_rand_blocks*to_block_size].
+    """
+    num_windows = from_seq_length // from_block_size - 2
+    rand_mask = tf.reshape(
+        tf.gather(to_blocked_mask, rand_attn, batch_dims=1),
+        [-1, num_attention_heads, num_windows, num_rand_blocks * from_block_size],
+    )
+    rand_mask = tf.einsum("BLQ,BHLK->BHLQK", from_blocked_mask[:, 1:-1], rand_mask)
+    return rand_mask
+
+
+def create_band_mask_from_inputs(from_blocked_mask, to_blocked_mask):
+    """Create a 5D attention mask from 3D blocked tensor masks.
+
+    Args:
+      from_blocked_mask: 3D Tensor of shape [batch_size,
+        from_seq_length//from_block_size, from_block_size].
+      to_blocked_mask: 3D Tensor of shape [batch_size,
+        to_seq_length//to_block_size, to_block_size].
+
+    Returns:
+      float Tensor of shape [batch_size, 1, from_seq_length//from_block_size-4,
+        from_block_size, 3*to_block_size].
+    """
+    exp_blocked_to_pad = tf.concat(
+        [to_blocked_mask[:, 1:-3], to_blocked_mask[:, 2:-2], to_blocked_mask[:, 3:-1]],
+        2,
+    )
+    band_mask = tf.einsum("BLQ,BLK->BLQK", from_blocked_mask[:, 2:-2], exp_blocked_to_pad)
+    band_mask = tf.expand_dims(band_mask, 1)
+    return band_mask
+
+
+def create_attention_mask_from_input_mask(from_mask, to_mask):
+    """Create attention mask from a 2D tensor mask.
+
+    Args:
+      from_mask: float32 Tensor of shape [batch_size, from_seq_length].
+      to_mask: float32 Tensor of shape [batch_size, to_seq_length].
+
+    Returns:
+      float32 Tensor of shape [batch_size, 1, from_seq_length, to_seq_length].
+    """
+    mask = tf.einsum("BF,BT->BFT", from_mask, to_mask)
+
+    # expand to create a slot for heads.
+    mask = tf.expand_dims(mask, 1)
+
+    return mask
+
+
+def bigbird_block_sparse_attention(
+    query_layer,
+    key_layer,
+    value_layer,
+    band_mask,
+    from_mask,
+    to_mask,
+    from_blocked_mask,
+    to_blocked_mask,
+    rand_attn,
+    num_attention_heads,
+    size_per_head,
+    num_rand_blocks,
+    from_seq_length,
+    to_seq_length,
+    from_block_size,
+    to_block_size,
+):
+    """BigBird attention sparse calculation using blocks in linear time.
+
+    Assumes from_seq_length//from_block_size == to_seq_length//to_block_size.
+    A pure function with a long argument list to allow easy use outside our
+    framework.
+
+    Args:
+      query_layer: float Tensor of shape [batch_size, num_attention_heads,
+        from_seq_length, size_per_head]
+      key_layer: float Tensor of shape [batch_size, num_attention_heads,
+        to_seq_length, size_per_head]
+      value_layer: float Tensor of shape [batch_size, num_attention_heads,
+        to_seq_length, size_per_head]
+      band_mask: float32 Tensor of shape [batch_size, 1,
+        from_seq_length//from_block_size-4, from_block_size, 3*to_block_size].
+        The values should be 1 or 0. The attention scores will effectively be
+        set to -infinity for any positions in the mask that are 0, and will be
+        unchanged for positions that are 1.
+      from_mask: float32 Tensor of shape [batch_size, 1, from_seq_length, 1].
+        The values should be 1 or 0. The attention scores will effectively be set
+        to -infinity for any positions in the mask that are 0, and will be
+        unchanged for positions that are 1.
+      to_mask: float32 Tensor of shape [batch_size, 1, 1, to_seq_length].
+        The values should be 1 or 0. The attention scores will effectively be set
+        to -infinity for any positions in the mask that are 0, and will be
+        unchanged for positions that are 1.
+      from_blocked_mask: float32 Tensor of shape [batch_size,
+        from_seq_length//from_block_size, from_block_size].
+ Same as from_mask, just reshaped. + to_blocked_mask: float32 Tensor of shape [batch_size, + to_seq_length//to_block_size, to_block_size]. + Same as to_mask, just reshaped. + rand_attn: int32 Tensor of shape [num_attention_heads, + from_seq_length//from_block_size-2, num_rand_blocks] specifying which + blocks to attend to for each from sequence block (except 2 global ones). + num_attention_heads: int. Number of attention heads. + size_per_head: int. Size of each attention head. + num_rand_blocks: int. Number of random chunks per row. + from_seq_length: int. length of from sequence. + to_seq_length: int. length of to sequence. + from_block_size: int. size of block in from sequence. + to_block_size: int. size of block in to sequence. + + Returns: + float Tensor of shape [batch_size, from_seq_length, num_attention_heads, + size_per_head]. + """ + assert from_seq_length // from_block_size == to_seq_length // to_block_size + + # repeat for batch size + batch_size = utils.get_shape_list(query_layer)[0] + rand_attn = tf.expand_dims(rand_attn, 0) + rand_attn = tf.repeat(rand_attn, batch_size, 0) + + rand_mask = create_rand_mask_from_inputs( + from_blocked_mask, + to_blocked_mask, + rand_attn, + num_attention_heads, + num_rand_blocks, + from_seq_length, + from_block_size, + ) + + # Define shorthands + # b = batch_size + h = num_attention_heads + r = num_rand_blocks + d = size_per_head + m = from_seq_length + n = to_seq_length + wm = from_block_size + wn = to_block_size + + blocked_query_matrix = tf.reshape(query_layer, (-1, h, m // wm, wm, d)) + blocked_key_matrix = tf.reshape(key_layer, (-1, h, n // wn, wn, d)) + blocked_value_matrix = tf.reshape(value_layer, (-1, h, n // wn, wn, d)) + gathered_key = tf.reshape( + tf.gather(blocked_key_matrix, rand_attn, batch_dims=2, name="gather_key"), + (-1, h, m // wm - 2, r * wn, d), + ) # [b, h, n//wn-2, r, wn, -1] + gathered_value = tf.reshape( + tf.gather(blocked_value_matrix, rand_attn, batch_dims=2, name="gather_value"), + (-1, h, m // wm - 2, r * wn, d), + ) # [b, h, n//wn-2, r, wn, -1] + + first_product = tf.einsum( + "BHQD,BHKD->BHQK", blocked_query_matrix[:, :, 0], key_layer + ) # [b, h, wm, -1] x [b, h, n, -1] ==> [b, h, wm, n] + first_product = tf.multiply(first_product, 1.0 / np.sqrt(d)) + first_product += (1.0 - to_mask) * -10000.0 + first_attn_weights = tf.nn.softmax(first_product) # [b, h, wm, n] + first_context_layer = tf.einsum( + "BHQK,BHKD->BHQD", first_attn_weights, value_layer + ) # [b, h, wm, n] x [b, h, n, -1] ==> [b, h, wm, -1] + first_context_layer = tf.expand_dims(first_context_layer, 2) + + second_key_mat = tf.concat( + [ + blocked_key_matrix[:, :, 0], + blocked_key_matrix[:, :, 1], + blocked_key_matrix[:, :, 2], + blocked_key_matrix[:, :, -1], + gathered_key[:, :, 0], + ], + 2, + ) # [b, h, (4+r)*wn, -1] + second_value_mat = tf.concat( + [ + blocked_value_matrix[:, :, 0], + blocked_value_matrix[:, :, 1], + blocked_value_matrix[:, :, 2], + blocked_value_matrix[:, :, -1], + gathered_value[:, :, 0], + ], + 2, + ) # [b, h, (4+r)*wn, -1] + second_product = tf.einsum( + "BHQD,BHKD->BHQK", blocked_query_matrix[:, :, 1], second_key_mat + ) # [b, h, wm, -1] x [b, h, (4+r)*wn, -1] ==> [b, h, wm, (4+r)*wn] + second_seq_pad = tf.concat( + [ + to_mask[:, :, :, : 3 * wn], + to_mask[:, :, :, -wn:], + tf.ones_like(rand_mask[:, :1, 0, :1]), + ], + 3, + ) + second_rand_pad = tf.concat([tf.ones_like(second_product[:, :, :, : 4 * wn]), rand_mask[:, :, 0]], 3) + second_product = tf.multiply(second_product, 1.0 / np.sqrt(d)) + second_product += (1.0 - 
tf.minimum(second_seq_pad, second_rand_pad)) * -10000.0 + second_attn_weights = tf.nn.softmax(second_product) # [b , h, wm, (4+r)*wn] + second_context_layer = tf.einsum( + "BHQK,BHKD->BHQD", second_attn_weights, second_value_mat + ) # [b, h, wm, (4+r)*wn] x [b, h, (4+r)*wn, -1] ==> [b, h, wm, -1] + second_context_layer = tf.expand_dims(second_context_layer, 2) + + exp_blocked_key_matrix = tf.concat( + [ + blocked_key_matrix[:, :, 1:-3], + blocked_key_matrix[:, :, 2:-2], + blocked_key_matrix[:, :, 3:-1], + ], + 3, + ) # [b, h, m//wm-4, 3*wn, -1] + exp_blocked_value_matrix = tf.concat( + [ + blocked_value_matrix[:, :, 1:-3], + blocked_value_matrix[:, :, 2:-2], + blocked_value_matrix[:, :, 3:-1], + ], + 3, + ) # [b, h, m//wm-4, 3*wn, -1] + middle_query_matrix = blocked_query_matrix[:, :, 2:-2] + inner_band_product = tf.einsum( + "BHLQD,BHLKD->BHLQK", middle_query_matrix, exp_blocked_key_matrix + ) # [b, h, m//wm-4, wm, -1] x [b, h, m//wm-4, 3*wn, -1] + # ==> [b, h, m//wm-4, wm, 3*wn] + inner_band_product = tf.multiply(inner_band_product, 1.0 / np.sqrt(d)) + rand_band_product = tf.einsum( + "BHLQD,BHLKD->BHLQK", middle_query_matrix, gathered_key[:, :, 1:-1] + ) # [b, h, m//wm-4, wm, -1] x [b, h, m//wm-4, r*wn, -1] + # ==> [b, h, m//wm-4, wm, r*wn] + rand_band_product = tf.multiply(rand_band_product, 1.0 / np.sqrt(d)) + first_band_product = tf.einsum( + "BHLQD,BHKD->BHLQK", middle_query_matrix, blocked_key_matrix[:, :, 0] + ) # [b, h, m//wm-4, wm, -1] x [b, h, wn, -1] ==> [b, h, m//wm-4, wm, wn] + first_band_product = tf.multiply(first_band_product, 1.0 / np.sqrt(d)) + last_band_product = tf.einsum( + "BHLQD,BHKD->BHLQK", middle_query_matrix, blocked_key_matrix[:, :, -1] + ) # [b, h, m//wm-4, wm, -1] x [b, h, wn, -1] ==> [b, h, m//wm-4, wm, wn] + last_band_product = tf.multiply(last_band_product, 1.0 / np.sqrt(d)) + inner_band_product += (1.0 - band_mask) * -10000.0 + first_band_product += (1.0 - tf.expand_dims(to_mask[:, :, :, :wn], 3)) * -10000.0 + last_band_product += (1.0 - tf.expand_dims(to_mask[:, :, :, -wn:], 3)) * -10000.0 + rand_band_product += (1.0 - rand_mask[:, :, 1:-1]) * -10000.0 + band_product = tf.concat( + [first_band_product, inner_band_product, rand_band_product, last_band_product], + -1, + ) # [b, h, m//wm-4, wm, (5+r)*wn] + attn_weights = tf.nn.softmax(band_product) # [b, h, m//wm-4, wm, (5+r)*wn] + context_layer = tf.einsum( + "BHLQK,BHLKD->BHLQD", + attn_weights[:, :, :, :, wn : 4 * wn], + exp_blocked_value_matrix, + ) # [b, h, m//wm-4, wm, 3*wn] x [b, h, m//wm-4, 3*wn, -1] + # ==> [b, h, m//wm-4, wm, -1] + context_layer += tf.einsum( + "BHLQK,BHLKD->BHLQD", + attn_weights[:, :, :, :, 4 * wn : -wn], + gathered_value[:, :, 1:-1], + ) # [b, h, m//wm-4, wm, r*wn] x [b, h, m//wm-4, r*wn, -1] + # ==> [b, h, m//wm-4, wm, -1] + context_layer += tf.einsum( + "BHLQK,BHKD->BHLQD", + attn_weights[:, :, :, :, :wn], + blocked_value_matrix[:, :, 0], + ) # [b, h, m//wm-4, wm, wn] x [b, h, wn, -1] ==> [b, h, m//wm-4, wm, -1] + context_layer += tf.einsum( + "BHLQK,BHKD->BHLQD", + attn_weights[:, :, :, :, -wn:], + blocked_value_matrix[:, :, -1], + ) # [b, h, m//wm-4, wm, wn] x [b, h, wn, -1] ==> [b, h, m//wm-4, wm, -1] + + second_last_key_mat = tf.concat( + [ + blocked_key_matrix[:, :, 0], + blocked_key_matrix[:, :, -3], + blocked_key_matrix[:, :, -2], + blocked_key_matrix[:, :, -1], + gathered_key[:, :, -1], + ], + 2, + ) # [b, h, (4+r)*wn, -1] + second_last_value_mat = tf.concat( + [ + blocked_value_matrix[:, :, 0], + blocked_value_matrix[:, :, -3], + blocked_value_matrix[:, :, -2], + 
blocked_value_matrix[:, :, -1],
+            gathered_value[:, :, -1],
+        ],
+        2,
+    )  # [b, h, (4+r)*wn, -1]
+    second_last_product = tf.einsum(
+        "BHQD,BHKD->BHQK", blocked_query_matrix[:, :, -2], second_last_key_mat
+    )  # [b, h, wm, -1] x [b, h, (4+r)*wn, -1] ==> [b, h, wm, (4+r)*wn]
+    second_last_seq_pad = tf.concat(
+        [
+            to_mask[:, :, :, :wn],
+            to_mask[:, :, :, -3 * wn :],
+            tf.ones_like(rand_mask[:, :1, 0, :1]),
+        ],
+        3,
+    )
+    second_last_rand_pad = tf.concat([tf.ones_like(second_last_product[:, :, :, : 4 * wn]), rand_mask[:, :, -1]], 3)
+    second_last_product = tf.multiply(second_last_product, 1.0 / np.sqrt(d))
+    second_last_product += (1.0 - tf.minimum(second_last_seq_pad, second_last_rand_pad)) * -10000.0
+    second_last_attn_weights = tf.nn.softmax(second_last_product)  # [b, h, wm, (4+r)*wn]
+    second_last_context_layer = tf.einsum(
+        "BHQK,BHKD->BHQD", second_last_attn_weights, second_last_value_mat
+    )  # [b, h, wm, (4+r)*wn] x [b, h, (4+r)*wn, -1] ==> [b, h, wm, -1]
+    second_last_context_layer = tf.expand_dims(second_last_context_layer, 2)
+
+    last_product = tf.einsum(
+        "BHQD,BHKD->BHQK", blocked_query_matrix[:, :, -1], key_layer
+    )  # [b, h, wm, -1] x [b, h, n, -1] ==> [b, h, wm, n]
+    last_product = tf.multiply(last_product, 1.0 / np.sqrt(d))
+    last_product += (1.0 - to_mask) * -10000.0
+    last_attn_weights = tf.nn.softmax(last_product)  # [b, h, wm, n]
+    last_context_layer = tf.einsum(
+        "BHQK,BHKD->BHQD", last_attn_weights, value_layer
+    )  # [b, h, wm, n] x [b, h, n, -1] ==> [b, h, wm, -1]
+    last_context_layer = tf.expand_dims(last_context_layer, 2)
+
+    context_layer = tf.concat(
+        [
+            first_context_layer,
+            second_context_layer,
+            context_layer,
+            second_last_context_layer,
+            last_context_layer,
+        ],
+        2,
+    )
+    context_layer = tf.reshape(context_layer, (-1, h, m, d)) * from_mask
+    context_layer = tf.transpose(context_layer, (0, 2, 1, 3))
+    return context_layer
+
+
+class MultiHeadedAttentionLayer(tf.keras.layers.Layer):
+    """A multi-headed attention layer.
+
+    It implements the following types of multi-headed attention:
+    - original_full attention from "Attention Is All You Need".
+    - simulated_sparse attention from BigBird with a full quadratic implementation.
+    - block_sparse attention from BigBird with a memory-efficient linear implementation.
+    """
+
+    def __init__(
+        self,
+        attention_type,
+        max_seq_length=4096,
+        num_attention_heads=1,
+        size_per_head=512,
+        num_rand_blocks=3,
+        from_seq_length=1024,
+        to_seq_length=1024,
+        from_block_size=64,
+        to_block_size=64,
+        attention_probs_dropout_prob=0.0,
+        initializer_range=0.02,
+        use_bias=True,
+        seed=None,
+        query_act=None,
+        key_act=None,
+        value_act=None,
+        name=None,
+    ):
+        """Constructor for a multi-headed attention layer.
+
+        Args:
+          attention_type: Type of attention, needs to be one of ['original_full',
+            'simulated_sparse', 'block_sparse'].
+          max_seq_length: (optional) int. Maximum sequence length of the model.
+          num_attention_heads: (optional) int. Number of attention heads.
+          size_per_head: (optional) int. Size of each attention head.
+          num_rand_blocks: (optional) int. Number of random chunks per row.
+          from_seq_length: (optional) int. length of from sequence.
+          to_seq_length: (optional) int. length of to sequence.
+          from_block_size: (optional) int. size of block in from sequence.
+          to_block_size: (optional) int. size of block in to sequence.
+          attention_probs_dropout_prob: (optional) float. Dropout probability of the
+            attention probabilities.
+          initializer_range: (optional) float. Range of the weight initializer.
+          use_bias: Whether the layer uses a bias vector.
+          seed: (optional) int. Random seed for generating the random mask.
+          query_act: (optional) Activation function for the query transform.
+          key_act: (optional) Activation function for the key transform.
+          value_act: (optional) Activation function for the value transform.
+          name: The name scope of this layer.
+        """
+        super(MultiHeadedAttentionLayer, self).__init__(name=name)
+        self.num_attention_heads = num_attention_heads
+        self.max_seq_length = max_seq_length
+        self.size_per_head = size_per_head
+        self.num_rand_blocks = num_rand_blocks
+        self.from_seq_length = from_seq_length
+        self.to_seq_length = to_seq_length
+        self.from_block_size = from_block_size
+        self.to_block_size = to_block_size
+        self.seed = seed
+
+        with tf.compat.v1.variable_scope(name):
+            self.query_layer = utils.Dense3dLayer(
+                num_attention_heads,
+                size_per_head,
+                utils.create_initializer(initializer_range),
+                query_act,
+                "query",
+                head_first=True,
+                use_bias=use_bias,
+            )
+
+            self.key_layer = utils.Dense3dLayer(
+                num_attention_heads,
+                size_per_head,
+                utils.create_initializer(initializer_range),
+                key_act,
+                "key",
+                head_first=True,
+                use_bias=use_bias,
+            )
+
+            self.value_layer = utils.Dense3dLayer(
+                num_attention_heads,
+                size_per_head,
+                utils.create_initializer(initializer_range),
+                value_act,
+                "value",
+                head_first=True,
+                use_bias=use_bias,
+            )
+
+        if attention_type == "original_full":
+            logging.info("**** Using original full attention ****")
+            self.attention_dropout = recompute_grad.RecomputingDropout(attention_probs_dropout_prob)
+            self.attn_impl = self.original_full_attention
+        elif attention_type == "simulated_sparse":
+            logging.info("**** Using simulated sparse attention ****")
+            self.attention_dropout = lambda x, training=None: x
+            self.rand_attn = self.generate_rand_attn_list()
+            self.rand_block_mask = self.convert_attn_list_to_mask(self.rand_attn)
+            self.attn_impl = self.bigbird_simulated_attention
+        elif attention_type == "block_sparse":
+            logging.info("**** Using block sparse attention ****")
+            assert (
+                from_seq_length // from_block_size == to_seq_length // to_block_size
+            ), "Error: the number of blocks needs to be the same!"
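+            # The block-sparse implementation applies no dropout to its
+            # attention probabilities, so no dropout layer is created here.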
+ self.attention_dropout = None + self.rand_attn = self.generate_rand_attn_list() + self.attn_impl = self.bigbird_block_sparse_attention + else: + raise NotImplementedError("Attention type {} is not implemented".format(attention_type)) + + def generate_rand_attn_list(self): + # generate random attention and corresponding masks + if self.seed is not None: + np.random.seed(self.seed) + # old plans used in paper + if self.from_seq_length in [1024, 2048, 3072, 4096]: + rand_attn = [ + bigbird_block_rand_mask( # pylint: disable=g-complex-comprehension + self.max_seq_length, + self.max_seq_length, + self.from_block_size, + self.to_block_size, + self.num_rand_blocks, + last_idx=1024, + )[: (self.from_seq_length // self.from_block_size - 2)] + for _ in range(self.num_attention_heads) + ] + else: + plan_from_length, plan_num_rand_blocks = get_rand_attn_plan( + self.from_seq_length, self.from_block_size, self.num_rand_blocks + ) + rand_attn = bigbird_block_rand_mask_with_head( + seq_length=self.from_seq_length, + block_size=self.from_block_size, + num_heads=self.num_attention_heads, + plan_from_length=plan_from_length, + plan_num_rand_blocks=plan_num_rand_blocks, + ) + rand_attn = np.stack(rand_attn, axis=0) + return tf.constant(rand_attn, dtype=tf.int32) + + def convert_attn_list_to_mask(self, rand_attn): + temp_mask = [ + full_bigbird_mask( # pylint: disable=g-complex-comprehension + self.max_seq_length, + self.from_seq_length, + self.to_seq_length, + self.from_block_size, + self.to_block_size, + rand_attn=rand_attn[i], + ) + for i in range(self.num_attention_heads) + ] + temp_mask = np.stack(temp_mask, axis=0) + temp_mask = np.array(temp_mask, dtype=bool) + rand_block_mask = tf.constant(temp_mask, dtype=tf.bool) # [N, F, T] + return tf.cast(rand_block_mask, tf.float32) + + def original_full_attention(self, query_layer, key_layer, value_layer, masks, training=None): + """Full quadratic attention calculation. + + Args: + query_layer: float Tensor of shape [batch_size, num_attention_heads, + from_seq_length, size_per_head] + key_layer: float Tensor of shape [batch_size, num_attention_heads, + to_seq_length, size_per_head] + value_layer: float Tensor of shape [batch_size, num_attention_heads, + to_seq_length, size_per_head] + masks: a list containing float32 Tensor representing attention_mask + of shape [batch_size, from_seq_length, to_seq_length]. + The values should be 1 or 0. The attention scores will effectively be + set to -infinity for any positions in the mask that are 0, and + will be unchanged for positions that are 1. + training: Boolean indicating whether the call is training or inference. + + Returns: + float Tensor of shape [batch_size, from_seq_length, num_attention_heads, + size_per_head]. + """ + attention_mask = masks[0] + + # Directly take n^2 dot product between "query" and "key". + attention_scores = tf.einsum("BNFH,BNTH->BNFT", query_layer, key_layer) + attention_scores = tf.multiply(attention_scores, 1.0 / np.sqrt(float(self.size_per_head))) + + if attention_mask is not None: + # Since attention_mask is 1.0 for positions we want to attend and 0.0 for + # masked positions, this operation will create a tensor which is 0.0 for + # positions we want to attend and -10000.0 for masked positions. + adder = (1.0 - attention_mask) * -10000.0 + + # Since we are adding it to the raw scores before the softmax, this is + # effectively the same as removing these entirely. + attention_scores += adder + + # Normalize the attention scores to probabilities. 
+        # `attention_probs` = [B, N, F, T]
+        attention_probs = tf.nn.softmax(attention_scores)
+
+        # This is actually dropping out entire tokens to attend to, which might
+        # seem a bit unusual, but is taken from the original Transformer paper.
+        attention_probs = self.attention_dropout(attention_probs, training=training)
+
+        # `context_layer` = [B, F, N, H]
+        context_layer = tf.einsum("BNFT,BNTH->BFNH", attention_probs, value_layer)
+        return context_layer
+
+    def bigbird_simulated_attention(self, query_layer, key_layer, value_layer, masks, training=None):
+        """BigBird attention calculation using masks in quadratic time.
+
+        Args:
+          query_layer: float Tensor of shape [batch_size, num_attention_heads,
+            from_seq_length, size_per_head]
+          key_layer: float Tensor of shape [batch_size, num_attention_heads,
+            to_seq_length, size_per_head]
+          value_layer: float Tensor of shape [batch_size, num_attention_heads,
+            to_seq_length, size_per_head]
+          masks: a list containing a float32 Tensor representing attention_mask
+            of shape [batch_size, from_seq_length, to_seq_length].
+            The values should be 1 or 0. The attention scores will effectively be
+            set to -infinity for any positions in the mask that are 0, and
+            will be unchanged for positions that are 1.
+          training: Boolean indicating whether the call is training or inference.
+
+        Returns:
+          float Tensor of shape [batch_size, from_seq_length, num_attention_heads,
+            size_per_head].
+        """
+        attention_mask = masks[0]
+        rand_block_mask = tf.expand_dims(self.rand_block_mask, 0)  # [1, N, F, T]
+        if attention_mask is not None:
+            attention_mask = tf.minimum(attention_mask, rand_block_mask)
+        else:
+            attention_mask = rand_block_mask
+        return self.original_full_attention(query_layer, key_layer, value_layer, [attention_mask], training=training)
+
+    def bigbird_block_sparse_attention(self, query_layer, key_layer, value_layer, masks, training=None):
+        """BigBird attention sparse calculation using blocks in linear time.
+
+        Args:
+          query_layer: float Tensor of shape [batch_size, num_attention_heads,
+            from_seq_length, size_per_head]
+          key_layer: float Tensor of shape [batch_size, num_attention_heads,
+            to_seq_length, size_per_head]
+          value_layer: float Tensor of shape [batch_size, num_attention_heads,
+            to_seq_length, size_per_head]
+          masks: A list of five masks used in BigBird attention at positions 1 to
+            5. Position 0 (the first element) is not used and can be left as None.
+            In the masks, the values should be 1 or 0. The attention scores will
+            effectively be set to -infinity for any positions in the mask that
+            are 0, and will be unchanged for positions that are 1.
+            "None": Not needed.
+            "band_mask": (optional) float32 Tensor of shape
+              [batch_size, 1, from_seq_length//from_block_size-4,
+              from_block_size, 3*to_block_size].
+            "from_mask": (optional) float32 Tensor of shape
+              [batch_size, 1, from_seq_length, 1].
+            "to_mask": (optional) float32 Tensor of shape
+              [batch_size, 1, 1, to_seq_length].
+            "from_blocked_mask": (optional) float32 Tensor of shape
+              [batch_size, from_seq_length//from_block_size, from_block_size].
+              Same as from_mask, just reshaped.
+            "to_blocked_mask": (optional) float32 Tensor of shape
+              [batch_size, to_seq_length//to_block_size, to_block_size].
+              Same as to_mask, just reshaped.
+          training: Boolean indicating whether the call is training or inference.
+
+        Returns:
+          float Tensor of shape [batch_size, from_seq_length, num_attention_heads,
+            size_per_head].
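+
+        Example (a minimal sketch; `layer`, `query`, `key`, `value` and the
+        mask tensors are placeholder names with assumed shapes, not objects
+        defined in this module):
+
+          # query/key/value: [batch, num_heads, seq_length, size_per_head]
+          masks = [None, band_mask, from_mask, to_mask,
+                   from_blocked_mask, to_blocked_mask]
+          out = layer.bigbird_block_sparse_attention(query, key, value, masks)
+          # out: [batch, seq_length, num_heads, size_per_head]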
+        """
+
+        (_, band_mask, from_mask, to_mask, from_blocked_mask, to_blocked_mask) = masks
+
+        return bigbird_block_sparse_attention(
+            query_layer,
+            key_layer,
+            value_layer,
+            band_mask,
+            from_mask,
+            to_mask,
+            from_blocked_mask,
+            to_blocked_mask,
+            self.rand_attn,
+            self.num_attention_heads,
+            self.size_per_head,
+            self.num_rand_blocks,
+            self.from_seq_length,
+            self.to_seq_length,
+            self.from_block_size,
+            self.to_block_size,
+        )
+
+    def call(self, from_tensor, to_tensor, masks, cache=None, decode_i=None, training=None):
+        """Implements a multi-headed attention layer from from_tensor to to_tensor.
+
+        Args:
+          from_tensor: float Tensor of shape [batch_size, from_seq_length,
+            from_width]
+          to_tensor: float Tensor of shape [batch_size, to_seq_length, to_width].
+          masks: A list of masks used by the different attention types. Only the
+            relevant masks need to be supplied; place None at the other positions.
+            In the masks, the values should be 1 or 0. The attention scores will
+            effectively be set to -infinity for any positions in the mask that
+            are 0, and will be unchanged for positions that are 1.
+            "attention_mask": (optional) float32 Tensor of shape
+              [batch_size, from_seq_length, to_seq_length].
+            "band_mask": (optional) float32 Tensor of shape
+              [batch_size, 1, from_seq_length//from_block_size-4,
+              from_block_size, 3*to_block_size].
+            "from_mask": (optional) float32 Tensor of shape
+              [batch_size, 1, from_seq_length, 1].
+            "to_mask": (optional) float32 Tensor of shape
+              [batch_size, 1, 1, to_seq_length].
+            "from_blocked_mask": (optional) float32 Tensor of shape
+              [batch_size, from_seq_length//from_block_size, from_block_size].
+              Same as from_mask, just reshaped.
+            "to_blocked_mask": (optional) float32 Tensor of shape
+              [batch_size, to_seq_length//to_block_size, to_block_size].
+              Same as to_mask, just reshaped.
+          cache: (Used during prediction) A dictionary with tensors containing
+            results of previous attentions. The dictionary must have the items:
+            {"k": tensor with shape
+                  [batch_size, max_len, num_attention_heads, size_per_head],
+             "v": tensor with shape
+                  [batch_size, max_len, num_attention_heads, size_per_head]}
+          decode_i: (Used during prediction) current location of decoding
+          training: Boolean indicating whether the call is training or inference.
+
+        Returns:
+          float Tensor of shape [batch_size, from_seq_length, num_attention_heads,
+            size_per_head].
+
+        Raises:
+          ValueError: Any of the arguments or tensor shapes are invalid.
+          NotImplementedError: For unknown attention type.
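+
+        Example (a minimal sketch for the original_full path; `attn_layer`,
+        `hidden` and `input_mask` are placeholder names with assumed shapes):
+
+          # hidden: [batch, seq_length, hidden_size], input_mask: [batch, seq_length]
+          attention_mask = create_attention_mask_from_input_mask(input_mask, input_mask)
+          out = attn_layer(hidden, hidden, [attention_mask], training=False)
+          # out: [batch, seq_length, num_attention_heads, size_per_head]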
+        """
+
+        # Scalar dimensions referenced here:
+        #   b = batch size (number of sequences)
+        #   m = `from_tensor` sequence length
+        #   n = `to_tensor` sequence length
+        #   h = `num_attention_heads`
+        #   d = `size_per_head`
+
+        # `query` = [b, h, m, d]
+        query = self.query_layer(from_tensor)
+
+        # `key` = [b, h, n, d]
+        key = self.key_layer(to_tensor)
+
+        # `value` = [b, h, n, d]
+        value = self.value_layer(to_tensor)
+
+        if cache is not None and decode_i is not None:
+            max_len = utils.get_shape_list(cache["k"])[2]
+            indices_select = tf.reshape(tf.one_hot(decode_i, max_len, dtype=to_tensor.dtype), [1, 1, max_len, 1])
+            key = cache["k"] + key * indices_select
+            value = cache["v"] + value * indices_select
+            cache["k"] = key
+            cache["v"] = value
+
+        contextual_output = self.attn_impl(query, key, value, masks, training=training)
+
+        return contextual_output
diff --git a/pretrain/kobigbird/beam_search.py b/pretrain/kobigbird/beam_search.py
new file mode 100644
index 0000000..3b6a2ca
--- /dev/null
+++ b/pretrain/kobigbird/beam_search.py
@@ -0,0 +1,242 @@
+# Copyright 2021 The BigBird Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Beam search branched from Pegasus.
+
+Original source:
+https://github.com/google-research/pegasus/blob/master/pegasus/layers/beam_search.py
+
+This beam search implementation is designed for TPU usage only and prefers
+flexibility over efficiency. Transformer attention caching is not enabled yet.
+
+Mostly follows the implementation in T2T. Several differences from pure beam
+search:
+1. has finished and alive seqs, and uses 2 * beam_size to grow alive seqs,
+   which means beam_size=1 is not equivalent to greedy decoding.
+2. prefers finished seqs over alive seqs.
+3. prefers lower indices when probabilities are equal (though unlikely).
+4. with custom length normalization and constraint.
+
+Notations:
+  B: batch_size, M: beam_size, T: max_decode_len, V: vocab_size, U: undefined
+"""
+# pylint: disable=invalid-name
+
+import tensorflow.compat.v2 as tf
+
+
+def length_normalization(start, alpha, min_len, max_len, out_of_range_penalty):
+    r"""Create length normalization function.
+
+    Combines length penalty from https://arxiv.org/abs/1609.08144,
+    and length constraint from https://www.aclweb.org/anthology/W18-2706.pdf.
+
+      scores = \sum_j log(P_j) / ((start + lengths)/(1 + start))**alpha
+               + out_of_range_penalty * (length > max_len or length < min_len)
+
+    Args:
+      start: int, length normalization start offset.
+      alpha: float, [0, 1.0], length normalization power.
+      min_len: int, minimum decode length.
+      max_len: int, maximum decode length.
+      out_of_range_penalty: float, penalty for lengths outside min_len and
+        max_len. Use a negative number that penalizes out-of-range decodes;
+        setting it to -inf makes the constraint hard.
+
+    Returns:
+      fn(log_probs_BxM, length)->scores_BxM: a function that normalizes the
+      summed log probabilities of sequences given the current decoding length.
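+
+    Example (a small numeric sketch, not taken from the original source: with
+    start=5 and alpha=0.6, an in-range 20-token decode is normalized by
+    ((5 + 20) / 6) ** 0.6, roughly 2.35):
+
+      norm_fn = length_normalization(5, 0.6, 0, 48, -1e3)
+      scores_BxM = norm_fn(log_probs_BxM, tf.constant(20))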
+ """ + + def length_norm_fn(log_probs_BxM, length_int): + """Normalize sum log probabilities given a sequence length.""" + dtype = log_probs_BxM.dtype + norm_flt = tf.pow(((start + tf.cast(length_int, dtype)) / (1.0 + start)), alpha) + log_probs_BxM /= norm_flt + too_short_bool = tf.less(length_int, min_len) + too_long_bool = tf.logical_and(tf.greater(length_int, max_len), max_len > 0) + out_of_range_bool = tf.logical_or(too_long_bool, too_short_bool) + log_probs_BxM += out_of_range_penalty * tf.cast(out_of_range_bool, dtype) + return log_probs_BxM + + return length_norm_fn + + +def beam_search( + symbols_to_logits_fn, + init_seq_BxT, + initial_cache_BxU, + vocab_size, + beam_size, + length_norm_fn, + eos_id=1, +): + """Beam search. + + Args: + symbols_to_logits_fn: fn(seq_BxT, cache_BxU, i) -> (logits_BxV, cache_BxU) + init_seq_BxT: initial sequence ids. + initial_cache_BxU: dictionary of tensors with shape BxU. + vocab_size: vocabulary size. + beam_size: beam size. + length_norm_fn: length normalization function. + eos_id: end of sequence. + + Returns: + Tuple of (beams_BxMxT, scores_BxM). Beam searched sequences and scores. + """ + B, T = init_seq_BxT.shape + M, V = beam_size, vocab_size + dtype = tf.float32 + int_dtype = init_seq_BxT.dtype + + def _loop_body( + i, + alive_seq_BxMxT, + alive_log_probs_BxM, + alive_cache_BxMxU, + finished_seq_BxMxT, + finished_scores_BxM, + ): + """Beam search loop body.""" + # Decode one step with beam + logits_BMxV, cache_BMxU = symbols_to_logits_fn( + _flatten_beam_dim(alive_seq_BxMxT), + tf.nest.map_structure(_flatten_beam_dim, alive_cache_BxMxU), + i, + ) + logits_BxMxV = _unflatten_beam_dim(logits_BMxV, M) + new_cache_BxMxU = tf.nest.map_structure(lambda t: _unflatten_beam_dim(t, M), cache_BMxU) + + # select top 2 * beam_size and fill alive and finished. + log_probs_BxMxV = logits_BxMxV - tf.reduce_logsumexp(logits_BxMxV, axis=2, keepdims=True) + log_probs_BxMxV += tf.expand_dims(alive_log_probs_BxM, axis=2) + log_probs_BxMV = tf.reshape(log_probs_BxMxV, [B, -1]) + new_log_probs_Bx2M, topk_indices_Bx2M = tf.nn.top_k(log_probs_BxMV, k=2 * M) + topk_beam_Bx2M = topk_indices_Bx2M // V + topk_seq_Bx2MxT, new_cache_Bx2MxU = _gather_nested([alive_seq_BxMxT, new_cache_BxMxU], topk_beam_Bx2M) + topk_ids_Bx2M = topk_indices_Bx2M % V + new_seq_Bx2MxT = _update_i(topk_seq_Bx2MxT, topk_ids_Bx2M, i) + new_finished_flags_Bx2M = tf.cast(tf.reduce_any(tf.equal(new_seq_Bx2MxT, eos_id), axis=-1), dtype) + + # get new alive + _, topk_alive_indices_BxM = tf.nn.top_k(new_log_probs_Bx2M + new_finished_flags_Bx2M * dtype.min, k=M) + (alive_seq_BxMxT, alive_log_probs_BxM, alive_cache_BxMxU) = _gather_nested( + [new_seq_Bx2MxT, new_log_probs_Bx2M, new_cache_Bx2MxU], + topk_alive_indices_BxM, + ) + + # get new finished + new_scores_Bx2M = length_norm_fn(new_log_probs_Bx2M, i + 1) + new_scores_Bx2M += (1 - new_finished_flags_Bx2M) * dtype.min + finished_seq_Bx3MxT = tf.concat([finished_seq_BxMxT, new_seq_Bx2MxT], axis=1) + finished_scores_Bx3M = tf.concat([finished_scores_BxM, new_scores_Bx2M], axis=1) + _, topk_finished_indices_BxM = tf.nn.top_k(finished_scores_Bx3M, k=M) + (finished_seq_BxMxT, finished_scores_BxM) = _gather_nested( + [finished_seq_Bx3MxT, finished_scores_Bx3M], topk_finished_indices_BxM + ) + + return [ + i + 1, + alive_seq_BxMxT, + alive_log_probs_BxM, + alive_cache_BxMxU, + finished_seq_BxMxT, + finished_scores_BxM, + ] + + # initialize. 
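+    # Only beam 0 starts alive with log prob 0.0; the other M-1 beams start at
+    # dtype.min so the first top-k step cannot pick duplicate hypotheses.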
+ init_i = tf.constant(0, dtype=int_dtype) + init_alive_seq_BxMxT = _expand_to_beam_size(init_seq_BxT, M) + log_probs_1xM = tf.constant([[0.0] + [dtype.min] * (M - 1)], dtype=dtype) + init_alive_log_probs_BxM = tf.tile(log_probs_1xM, [B, 1]) + init_alive_cache_BxMxU = tf.nest.map_structure(lambda t: _expand_to_beam_size(t, M), initial_cache_BxU) + init_finished_seq_BxMxT = tf.zeros(tf.shape(init_alive_seq_BxMxT), int_dtype) + init_finished_scores_BxM = tf.zeros([B, M], dtype=dtype) + dtype.min + + # run loop. + ( + _, + final_alive_seq_BxMxT, + final_alive_scores_BxM, + _, + final_finished_seq_BxMxT, + final_finished_scores_BxM, + ) = tf.while_loop( + lambda *args: True, # Always do T iterations + _loop_body, + loop_vars=[ + init_i, + init_alive_seq_BxMxT, + init_alive_log_probs_BxM, + init_alive_cache_BxMxU, + init_finished_seq_BxMxT, + init_finished_scores_BxM, + ], + parallel_iterations=1, + back_prop=False, + maximum_iterations=T, + ) + + # process finished. + final_finished_flag_BxMx1 = tf.reduce_any(tf.equal(final_finished_seq_BxMxT, eos_id), axis=-1, keepdims=True) + final_seq_BxMxT = tf.where( + tf.tile(final_finished_flag_BxMx1, [1, 1, T]), + final_finished_seq_BxMxT, + final_alive_seq_BxMxT, + ) + final_scores_BxM = tf.where( + tf.squeeze(final_finished_flag_BxMx1, axis=-1), + final_finished_scores_BxM, + final_alive_scores_BxM, + ) + return final_seq_BxMxT, final_scores_BxM + + +def _update_i(tensor_BxNxT, updates_BxN, i): + B, N, T = tensor_BxNxT.shape + tensor_BNxT = tf.reshape(tensor_BxNxT, [-1, T]) + updates_BN = tf.reshape(updates_BxN, [-1]) + batch_BN = tf.range(B * N, dtype=tf.int32) + i_BN = tf.fill([B * N], i) + ind_BNx2 = tf.stack([batch_BN, i_BN], axis=-1) + tensor_BNxT = tf.tensor_scatter_nd_update(tensor_BNxT, ind_BNx2, updates_BN) + return tf.reshape(tensor_BNxT, [B, N, T]) + + +def _expand_to_beam_size(tensor_BxU, beam_size): + tensor_Bx1xU = tf.expand_dims(tensor_BxU, axis=1) + tile_dims = [1] * tensor_Bx1xU.shape.ndims + tile_dims[1] = beam_size + tensor_BxMxU = tf.tile(tensor_Bx1xU, tile_dims) + return tensor_BxMxU + + +def _flatten_beam_dim(tensor_BxMxU): + shape = tensor_BxMxU.shape.as_list() + tensor_BMxU = tf.reshape(tensor_BxMxU, [shape[0] * shape[1]] + shape[2:]) + return tensor_BMxU + + +def _unflatten_beam_dim(tensor_BMxU, M): + shape = tensor_BMxU.shape.as_list() + tensor_BxMxU = tf.reshape(tensor_BMxU, [shape[0] // M, M] + shape[1:]) + return tensor_BxMxU + + +def _gather_nested(nested_BxMxU, indices_BxN): + def _gather_beam(tensor_BxMxU): + tensor_BxNxU = tf.gather(tensor_BxMxU, indices_BxN, batch_dims=1, axis=1) + return tensor_BxNxU + + return tf.nest.map_structure(_gather_beam, nested_BxMxU) diff --git a/pretrain/kobigbird/decoder.py b/pretrain/kobigbird/decoder.py new file mode 100644 index 0000000..e3608f8 --- /dev/null +++ b/pretrain/kobigbird/decoder.py @@ -0,0 +1,671 @@ +# Copyright 2021 The BigBird Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+"""BigBird Decoder Layers."""
+
+import tensorflow.compat.v2 as tf
+from kobigbird import attention, beam_search, recompute_grad, utils
+
+
+class PrenormDecoderLayer(tf.keras.layers.Layer):
+    """Decoder layer of a transformer in Pegasus style.
+
+    The layer_norm is taken before self-attention.
+    """
+
+    def __init__(
+        self,
+        max_seq_length=4096,
+        hidden_size=768,
+        intermediate_size=3072,
+        intermediate_act_fn=utils.gelu,
+        attention_probs_dropout_prob=0.0,
+        hidden_dropout_prob=0.1,
+        initializer_range=0.02,
+        num_attention_heads=12,
+        use_bias=True,
+        name=None,
+    ):
+        """Constructor of a decoder layer of a transformer in Pegasus style.
+
+        Args:
+          max_seq_length: (optional) int. Maximum sequence length of the model.
+          hidden_size: (optional) int. Size of hidden dimension.
+          intermediate_size: (optional) int. Size of intermediate dimension.
+          intermediate_act_fn: (optional) Activation function for intermediate layer.
+          attention_probs_dropout_prob: (optional) float. Dropout probability of the
+            attention probabilities.
+          hidden_dropout_prob: (optional) float. Dropout probability of the
+            attention.
+          initializer_range: (optional) float. Range of the weight initializer.
+          num_attention_heads: (optional) int. Number of attention heads.
+          use_bias: (optional) bool. Whether key/query/value uses a bias vector.
+          name: The name scope of this layer.
+        """
+        super(PrenormDecoderLayer, self).__init__(name=name)
+
+        with tf.compat.v1.variable_scope(name):
+
+            attention_head_size = hidden_size // num_attention_heads
+            with tf.compat.v1.variable_scope("attention"):
+                # Pre-Normalization layer
+                with tf.compat.v1.variable_scope("self"):
+                    self.first_layer_norm = utils.NormLayer(hidden_size)
+                # Self-Attention layer
+                self.self_attn_layer = attention.MultiHeadedAttentionLayer(
+                    "original_full",
+                    max_seq_length=max_seq_length,
+                    use_bias=use_bias,
+                    name="self",
+                    num_attention_heads=num_attention_heads,
+                    size_per_head=attention_head_size,
+                    initializer_range=initializer_range,
+                    attention_probs_dropout_prob=attention_probs_dropout_prob,
+                )
+                # Feedforward layer
+                with tf.compat.v1.variable_scope("output"):
+                    self.self_proj_layer = utils.Dense3dProjLayer(
+                        num_attention_heads,
+                        attention_head_size,
+                        utils.create_initializer(initializer_range),
+                        None,
+                        "dense",
+                        use_bias,
+                    )
+                # Dropout
+                self.self_attn_dropout = recompute_grad.RecomputingDropout(hidden_dropout_prob)
+                # Pre-Normalization layer
+                with tf.compat.v1.variable_scope("encdec"):
+                    self.second_layer_norm = utils.NormLayer(hidden_size)
+                # Cross-Attention layer
+                self.cross_attn_layer = attention.MultiHeadedAttentionLayer(
+                    "original_full",
+                    max_seq_length=max_seq_length,
+                    use_bias=use_bias,
+                    name="encdec",
+                    num_attention_heads=num_attention_heads,
+                    size_per_head=attention_head_size,
+                    initializer_range=initializer_range,
+                    attention_probs_dropout_prob=attention_probs_dropout_prob,
+                )
+                # Feedforward layer
+                with tf.compat.v1.variable_scope("encdec_output"):
+                    self.cross_proj_layer = utils.Dense3dProjLayer(
+                        num_attention_heads,
+                        attention_head_size,
+                        utils.create_initializer(initializer_range),
+                        None,
+                        "dense",
+                        use_bias,
+                    )
+                # Dropout
+                self.cross_attn_dropout = recompute_grad.RecomputingDropout(hidden_dropout_prob)
+
+            with tf.compat.v1.variable_scope("intermediate"):
+                # Normalization layer
+                self.third_layer_norm = utils.NormLayer(hidden_size)
+                # Feedforward layer
+                self.expand_layer = utils.Dense2dLayer(
+                    hidden_size,
+                    intermediate_size,
+                    utils.create_initializer(initializer_range),
+                    intermediate_act_fn,
+                    "dense",
+                )
+
+            with tf.compat.v1.variable_scope("output"):
+                # Feedforward layer
+                self.contract_layer = utils.Dense2dLayer(
+                    intermediate_size,
+                    hidden_size,
+                    utils.create_initializer(initializer_range),
+                    None,
+                    "dense",
+                )
+                # Dropout
+                self.output_dropout = recompute_grad.RecomputingDropout(hidden_dropout_prob)
+
+    def call(
+        self,
+        layer_input,
+        encoder_outputs,
+        self_attention_mask,
+        attention_mask,
+        cache=None,
+        decode_i=None,
+        training=None,
+    ):
+        """Implements a decoder layer of a transformer in Pegasus style.
+
+        The layer_norm is taken before self-attention.
+
+        Args:
+          layer_input: float Tensor of shape [batch_size, seq_length, hidden_size].
+          encoder_outputs: tensors with shape [batch_size, input_length,
+            num_hidden_layers, hidden_size]
+          self_attention_mask: bias for decoder self-attention layer. [1, 1,
+            target_length, target_length]
+          attention_mask: bias for encoder-decoder attention layer. [batch_size, 1,
+            1, input_length]
+          cache: (Used during prediction) A dictionary with tensors containing
+            results of previous attentions. The dictionary must have the items:
+            {"k": tensor with shape
+                  [batch_size, max_len, num_attention_heads, size_per_head],
+             "v": tensor with shape
+                  [batch_size, max_len, num_attention_heads, size_per_head]}
+          decode_i: (Used during prediction) current location of decoding
+          training: Boolean indicating whether the call is training or inference.
+
+        Returns:
+          float Tensor of shape [batch_size, seq_length, hidden_size].
+
+        Raises:
+          ValueError: Any of the arguments or tensor shapes are invalid.
+          NotImplementedError: For unknown attention type.
+        """
+        # self-attention
+        normalized_layer_input = self.first_layer_norm(layer_input)
+        self_attention_output = self.self_attn_layer(
+            normalized_layer_input,
+            normalized_layer_input,
+            [self_attention_mask],
+            cache=cache,
+            decode_i=decode_i,
+            training=training,
+        )
+
+        # Run a linear projection of `hidden_size` then add a residual
+        # with `layer_input`.
+        self_attention_output = self.self_proj_layer(self_attention_output)
+        self_attention_output = self.self_attn_dropout(self_attention_output, training=training)
+        self_attention_output = self_attention_output + layer_input
+
+        # Cross-attention
+        normalized_self_attention_output = self.second_layer_norm(self_attention_output)
+        attention_output = self.cross_attn_layer(
+            normalized_self_attention_output,
+            encoder_outputs,
+            [attention_mask],
+            training=training,
+        )
+
+        # Run a linear projection of `hidden_size` then add a residual
+        # with `self_attention_output`.
+        attention_output = self.cross_proj_layer(attention_output)
+        attention_output = self.cross_attn_dropout(attention_output, training=training)
+        attention_output = attention_output + self_attention_output
+
+        # The activation is only applied to the "intermediate" hidden layer.
+        normalized_attention_output = self.third_layer_norm(attention_output)
+        intermediate_output = self.expand_layer(normalized_attention_output)
+
+        # Down-project back to `hidden_size` then add the residual.
+        layer_output = self.contract_layer(intermediate_output)
+        layer_output = self.output_dropout(layer_output, training=training)
+        layer_output = layer_output + attention_output
+        return layer_output
+
+
+class PostnormDecoderLayer(tf.keras.layers.Layer):
+    """Decoder layer of a transformer in BERT style.
+
+    The layer_norm is taken after self-attention.
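+
+    In pseudo-code the residual pattern is (a simplified paraphrase that omits
+    the projections and dropout, not code from this repository):
+
+      x = layer_norm(x + self_attention(x))
+      x = layer_norm(x + cross_attention(x, encoder_outputs))
+      x = layer_norm(x + feed_forward(x))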
+ """ + + def __init__( + self, + max_seq_length=4096, + hidden_size=768, + intermediate_size=3072, + intermediate_act_fn=utils.gelu, + attention_probs_dropout_prob=0.0, + hidden_dropout_prob=0.1, + initializer_range=0.02, + num_attention_heads=12, + use_bias=True, + name=None, + ): + """Constructor of a decoder layer of a transformer in BERT style. + + Args: + hidden_size: (optional) int. Size of hidden dimension. + intermediate_size: (optional) int. Size of intermediate dimension. + intermediate_act_fn: optional) Activation function for intermediate layer. + attention_probs_dropout_prob: (optional) float. Dropout probability of the + attention probabilities. + hidden_dropout_prob: (optional) float. Dropout probability of the + attention. + initializer_range: (optional) float. Range of the weight initializer. + num_attention_heads: (optional) int. Number of attention heads. + use_bias: (optional) bool. Whether key/query/value uses a bias vector. + name: The name scope of this layer. + """ + super(PostnormDecoderLayer, self).__init__(name=name) + + with tf.compat.v1.variable_scope(name): + + attention_head_size = hidden_size // num_attention_heads + with tf.compat.v1.variable_scope("attention"): + # Self-Attention layers + self.self_attn_layer = attention.MultiHeadedAttentionLayer( + "original_full", + max_seq_length=max_seq_length, + use_bias=use_bias, + name="self", + num_attention_heads=num_attention_heads, + size_per_head=attention_head_size, + initializer_range=initializer_range, + attention_probs_dropout_prob=attention_probs_dropout_prob, + ) + + with tf.compat.v1.variable_scope("output"): + # Feedforward layer + self.self_proj_layer = utils.Dense3dProjLayer( + num_attention_heads, + attention_head_size, + utils.create_initializer(initializer_range), + None, + "dense", + use_bias, + ) + # Post-Normalization layer + self.first_layer_norm = utils.NormLayer(hidden_size) + # Dropout + self.self_attn_dropout = recompute_grad.RecomputingDropout(hidden_dropout_prob) + + # Cross-Attention layers + self.cross_attn_layer = attention.MultiHeadedAttentionLayer( + "original_full", + max_seq_length=max_seq_length, + use_bias=use_bias, + name="encdec", + num_attention_heads=num_attention_heads, + size_per_head=attention_head_size, + initializer_range=initializer_range, + attention_probs_dropout_prob=attention_probs_dropout_prob, + ) + + with tf.compat.v1.variable_scope("encdec_output"): + # Feedforward layer + self.cross_proj_layer = utils.Dense3dProjLayer( + num_attention_heads, + attention_head_size, + utils.create_initializer(initializer_range), + None, + "dense", + use_bias, + ) + # Post-Normalization layer + self.second_layer_norm = utils.NormLayer(hidden_size) + # Dropout + self.cross_attn_dropout = recompute_grad.RecomputingDropout(hidden_dropout_prob) + + with tf.compat.v1.variable_scope("intermediate"): + # Feedforward layer + self.expand_layer = utils.Dense2dLayer( + hidden_size, + intermediate_size, + utils.create_initializer(initializer_range), + intermediate_act_fn, + "dense", + ) + + with tf.compat.v1.variable_scope("output"): + # Feedforward layer + self.contract_layer = utils.Dense2dLayer( + intermediate_size, + hidden_size, + utils.create_initializer(initializer_range), + None, + "dense", + ) + # Normalization layer + self.third_layer_norm = utils.NormLayer(hidden_size) + # Dropout + self.output_dropout = recompute_grad.RecomputingDropout(hidden_dropout_prob) + + def call( + self, + layer_input, + encoder_outputs, + self_attention_mask, + attention_mask, + cache=None, + 
decode_i=None, + training=None, + ): + """Implements a decoder layer of a transformer in BERT style. + + The layer_norm is taken after self-attention. + + Args: + layer_input: float Tensor of shape [batch_size, seq_length, hidden_size]. + encoder_outputs: tensors with shape [batch_size, input_length, + num_hidden_layers, hidden_size] + self_attention_mask: bias for decoder self-attention layer. [1, 1, + target_length, target_length] + attention_mask: bias for encoder-decoder attention layer. [batch_size, 1, + 1, input_length] + cache: (Used during prediction) A dictionary with tensors containing + results of previous attentions. The dictionary must have the items: + {"k": tensor with shape + [batch_size, max_len, num_attention_heads, size_per_head], + "v": tensor with shape + [batch_size, max_len, num_attention_heads, size_per_head]} + decode_i: (Used during prediction) current location of decoding + training: Boolean indicating whether the call is training or inference. + + Returns: + float Tensor of shape [batch_size, seq_length, hidden_size]. + + Raises: + ValueError: Any of the arguments or tensor shapes are invalid. + NotImplementedError: For unknown attention type. + """ + # self-attention + self_attention_output = self.self_attn_layer( + layer_input, + layer_input, + [self_attention_mask], + cache=cache, + decode_i=decode_i, + training=training, + ) + + # Run a linear projection of `hidden_size` then add a residual + # with `layer_input`. + self_attention_output = self.self_proj_layer(self_attention_output) + self_attention_output = self.self_attn_dropout(self_attention_output, training=training) + self_attention_output = self.first_layer_norm(self_attention_output + layer_input) + + # cross-attention + attention_output = self.cross_attn_layer( + self_attention_output, encoder_outputs, [attention_mask], training=training + ) + + # Run a linear projection of `hidden_size` then add a residual + # with `layer_input`. + attention_output = self.cross_proj_layer(attention_output) + attention_output = self.cross_attn_dropout(attention_output, training=training) + attention_output = self.second_layer_norm(attention_output + self_attention_output) + + # The activation is only applied to the "intermediate" hidden layer. + intermediate_output = self.expand_layer(attention_output) + + # Down-project back to `hidden_size` then add the residual. 
+        layer_output = self.contract_layer(intermediate_output)
+        layer_output = self.output_dropout(layer_output, training=training)
+        layer_output = self.third_layer_norm(layer_output + attention_output)
+        return layer_output
+
+
+def add_gradient_recomputation(original_class):
+    """Creates a subclass that enables gradient checkpointing."""
+
+    class RecomputeLayer(original_class):
+        """Transformer layer that recomputes the forward pass during backprop."""
+
+        def call(
+            self,
+            layer_input,
+            encoder_outputs,
+            self_attention_mask,
+            attention_mask,
+            cache=None,
+            decode_i=None,
+            training=None,
+        ):
+            def f(layer_input, encoder_outputs):
+                x = super(RecomputeLayer, self).call(
+                    layer_input,
+                    encoder_outputs,
+                    self_attention_mask,
+                    attention_mask,
+                    cache,
+                    decode_i,
+                    training=training,
+                )
+                return x
+
+            f = recompute_grad.recompute_grad(f)
+
+            return f(layer_input, encoder_outputs)
+
+    return RecomputeLayer
+
+
+class DecoderStack(tf.keras.layers.Layer):
+    """Transformer decoder stack."""
+
+    def __init__(self, params):
+        if params["couple_encoder_decoder"]:
+            name = "encoder"
+            super(DecoderStack, self).__init__(name=name)
+        else:
+            name = "decoder"
+            super(DecoderStack, self).__init__(name=name)
+
+        self.params = params
+
+        if params["norm_type"] == "prenorm":
+            decoder_class = PrenormDecoderLayer
+        elif params["norm_type"] == "postnorm":
+            decoder_class = PostnormDecoderLayer
+        else:
+            raise NotImplementedError("Norm type {} is not implemented".format(params["norm_type"]))
+
+        if params["use_gradient_checkpointing"]:
+            decoder_class = add_gradient_recomputation(decoder_class)
+
+        if self.params.get("num_decoder_layers", None) is not None:
+            num_hidden_layers = self.params["num_decoder_layers"]
+        else:
+            num_hidden_layers = self.params["num_hidden_layers"]
+
+        with tf.compat.v1.variable_scope(name):
+            # Decoder layers
+            self.decoder_layers = [
+                decoder_class(  # pylint: disable=g-complex-comprehension
+                    self.params["max_position_embeddings"],
+                    self.params["hidden_size"],
+                    self.params["intermediate_size"],
+                    utils.get_activation(self.params["hidden_act"]),
+                    self.params["attention_probs_dropout_prob"],
+                    self.params["hidden_dropout_prob"],
+                    self.params["initializer_range"],
+                    self.params["num_attention_heads"],
+                    self.params["use_bias"],
+                    name="layer_%d" % layer_idx,
+                )
+                for layer_idx in range(num_hidden_layers)
+            ]
+
+            # Normalization layer
+            self.layer_norm = utils.NormLayer(self.params["hidden_size"])
+
+    def call(
+        self,
+        decoder_inputs,
+        self_attention_mask,
+        encoder_outputs,
+        encoder_mask,
+        cache=None,
+        decode_i=None,
+        training=None,
+    ):
+        """Return the output of the decoder layer stacks.
+
+        Args:
+          decoder_inputs: tensor with shape
+            [batch_size, target_length, hidden_size]
+          self_attention_mask: bias for decoder self-attention layer. [1, 1,
+            target_length, target_length]
+          encoder_outputs: tensors with shape [batch_size, input_length,
+            hidden_size]
+          encoder_mask: bias for encoder-decoder attention layer. [batch_size,
+            input_length]
+          cache: (Used during prediction) A dictionary with tensors containing
+            results of previous attentions. The dictionary must have the items:
+            {"k": tensor with shape
+                  [batch_size, max_len, num_attention_heads, size_per_head],
+             "v": tensor with shape
+                  [batch_size, max_len, num_attention_heads, size_per_head]}
+          decode_i: (Used during prediction) current location of decoding.
+          training: Boolean indicating whether the call is training or inference.
+
+        Returns:
+          Output of the decoder layer stack: a float32 tensor with shape
+          [batch_size, target_length, hidden_size].
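+
+        Example (a minimal sketch; `params`, `dec_in`, `enc_out`, `enc_mask`
+        and `target_len` are placeholders, not objects defined in this module):
+
+          stack = DecoderStack(params)
+          self_mask = create_self_attention_mask(target_len)
+          out = stack(dec_in, self_mask, enc_out, enc_mask, training=True)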
+        """
+        # Expand encoder mask to broadcast over num heads and from_seq axis
+        attention_mask = tf.expand_dims(tf.expand_dims(encoder_mask, 1), 1)
+        attention_mask = tf.cast(attention_mask, tf.float32)
+
+        if self.params["norm_type"] == "postnorm":
+            decoder_inputs = self.layer_norm(decoder_inputs)
+
+        layer_output = decoder_inputs
+        for layer in self.decoder_layers:
+            layer_cache = cache[layer.name] if cache is not None else None
+            layer_output = layer(
+                layer_output,
+                encoder_outputs,
+                self_attention_mask,
+                attention_mask,
+                layer_cache,
+                decode_i,
+                training=training,
+            )
+
+        if self.params["norm_type"] == "prenorm":
+            layer_output = self.layer_norm(layer_output)
+
+        return layer_output
+
+
+def create_self_attention_mask(length):
+    with tf.name_scope("decoder_self_attention_mask"):
+        valid_locs = tf.linalg.band_part(tf.ones([length, length]), -1, 0)
+        valid_locs = tf.reshape(valid_locs, [1, 1, length, length])
+        return valid_locs
+
+
+def inplace_update_i(inp_tensor, updates, i):
+    """Inplace update a tensor. B: batch_size, L: tensor length."""
+    batch_size = inp_tensor.shape[0]
+    indices = tf.stack(
+        [
+            tf.range(batch_size, dtype=tf.int32),
+            tf.fill([batch_size], tf.cast(i, tf.int32)),
+        ],
+        axis=-1,
+    )
+    return tf.tensor_scatter_nd_update(inp_tensor, indices, updates)
+
+
+# pylint: disable=invalid-name
+def left2right_decode(
+    symbols_to_logits_fn,
+    start_symbols,
+    context_BxU_dict,
+    batch_size,
+    max_decode_len,
+    vocab_size,
+    beam_size=1,
+    beam_start=5,
+    beam_alpha=0.6,
+    beam_min=0,
+    beam_max=-1,
+    eos_id=1,
+):
+    """Left-to-right decode.
+
+    Notations:
+      B: batch_size, V: vocab_size, T: decode_len, U: undefined dimensions
+
+    Args:
+      symbols_to_logits_fn: logits = fn(decodes, context, i). Should take
+        [batch_size, decoded_ids] and return [batch_size, vocab_size].
+      start_symbols: starting ids [batch_size]
+      context_BxU_dict: dict of Tensors.
+      batch_size: int, decode batch size.
+      max_decode_len: int, maximum number of steps to decode.
+      vocab_size: int, output vocab size.
+      beam_size: Number of beams to decode.
+      beam_start: start length for scaling, defaults to 5.
+      beam_alpha: Length penalty for decoding. Should be between 0 (shorter) and
+        1 (longer), defaults to 0.6.
+      beam_min: Minimum beam search length.
+      beam_max: Maximum beam search length. Set to -1 for unlimited.
+      eos_id: end-of-sequence token id, defaults to 1.
+
+    Returns:
+      decodes: Tensor[batch, decode_len]
+    """
+    dtype = tf.int32
+    start_symbols = tf.expand_dims(start_symbols, 1)
+    # When beam_size=1, beam_search does not behave exactly like greedy.
+    # This is due to using 2 * beam_size in grow_topk, and keeping the top
+    # beam_size candidates that haven't reached EOS as alive.
+    # In this case, the alpha value for the length penalty takes effect.
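+    # Greedy path: update one position per step and stop early once every
+    # sequence in the batch has produced eos_id.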
+    if beam_size == 1:
+
+        def decode_loop(i, decodes_BxT, cache_BxU_dict):
+            logits_BxV = symbols_to_logits_fn(decodes_BxT, cache_BxU_dict, i)
+            decodes_BxT = inplace_update_i(decodes_BxT, tf.argmax(logits_BxV, -1, output_type=tf.int32), i)
+            return i + 1, decodes_BxT, cache_BxU_dict
+
+        def loop_cond(i, decodes_BxT, unused_cache_BxU_dict):
+            finished_B = tf.reduce_any(tf.equal(decodes_BxT, eos_id), axis=1)
+            return tf.logical_and(i < max_decode_len, tf.logical_not(tf.reduce_all(finished_B)))
+
+        init_dec_BxT = tf.concat(
+            [
+                tf.cast(start_symbols, dtype=dtype),
+                tf.zeros([batch_size, max_decode_len - 1], dtype=dtype),
+            ],
+            axis=1,
+        )
+        _, decodes, _ = tf.while_loop(
+            loop_cond,
+            decode_loop,
+            [tf.constant(0, dtype=dtype), init_dec_BxT, context_BxU_dict],
+        )
+        return decodes
+
+    else:
+
+        def symbols_to_logits_fn_with_sampling(decodes_BxT, states_BxU_dict, i):
+            logits_BxV = symbols_to_logits_fn(decodes_BxT, states_BxU_dict, i)
+            return logits_BxV, states_BxU_dict
+
+        length_norm_fn = beam_search.length_normalization(beam_start, beam_alpha, beam_min, beam_max, -1e3)
+
+        init_dec_BxT = tf.concat(
+            [
+                tf.cast(start_symbols, dtype=tf.int32),
+                tf.zeros([batch_size, max_decode_len - 1], dtype=tf.int32),
+            ],
+            axis=1,
+        )
+
+        beams, _ = beam_search.beam_search(
+            symbols_to_logits_fn_with_sampling,
+            init_dec_BxT,
+            context_BxU_dict,
+            vocab_size,
+            beam_size,
+            length_norm_fn,
+            eos_id,
+        )
+        return beams[:, 0, :]
diff --git a/pretrain/kobigbird/encoder.py b/pretrain/kobigbird/encoder.py
new file mode 100644
index 0000000..fc25e68
--- /dev/null
+++ b/pretrain/kobigbird/encoder.py
@@ -0,0 +1,540 @@
+# Copyright 2021 The BigBird Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""BigBird Encoder Layers."""
+
+import tensorflow.compat.v2 as tf
+from kobigbird import attention, recompute_grad, utils
+
+
+class PrenormEncoderLayer(tf.keras.layers.Layer):
+    """Encoder layer of a transformer in Pegasus style.
+
+    The layer_norm is taken before self-attention.
+    """
+
+    def __init__(
+        self,
+        attention_type,
+        max_seq_length=4096,
+        hidden_size=768,
+        intermediate_size=3072,
+        intermediate_act_fn=utils.gelu,
+        attention_probs_dropout_prob=0.0,
+        hidden_dropout_prob=0.1,
+        initializer_range=0.02,
+        num_attention_heads=12,
+        num_rand_blocks=3,
+        seq_length=1024,
+        block_size=64,
+        use_bias=True,
+        seed=None,
+        name=None,
+    ):
+        """Constructor of an encoder layer of a transformer in Pegasus style.
+
+        Args:
+          attention_type: Type of attention, needs to be one of ['original_full',
+            'simulated_sparse', 'block_sparse'].
+          max_seq_length: (optional) Maximum sequence length of the model.
+          hidden_size: (optional) int. Size of hidden dimension.
+          intermediate_size: (optional) int. Size of intermediate dimension.
+          intermediate_act_fn: (optional) Activation function for intermediate layer.
+          attention_probs_dropout_prob: (optional) float. Dropout probability of the
+            attention probabilities.
+          hidden_dropout_prob: (optional) float. Dropout probability of the
+            attention.
+          initializer_range: (optional) float. Range of the weight initializer.
+          num_attention_heads: (optional) int. Number of attention heads.
+          num_rand_blocks: (optional) int. Number of random chunks per row.
+          seq_length: (optional) int. length of sequence.
+          block_size: (optional) int. size of block in sequence.
+          use_bias: (optional) bool. Whether key/query/value uses a bias vector.
+          seed: (optional) int. Random seed for generating random mask.
+          name: The name scope of this layer.
+        """
+        super(PrenormEncoderLayer, self).__init__(name=name)
+
+        with tf.compat.v1.variable_scope(name):
+
+            attention_head_size = hidden_size // num_attention_heads
+            with tf.compat.v1.variable_scope("attention"):
+                # Pre-Normalization layer
+                with tf.compat.v1.variable_scope("self"):
+                    self.first_layer_norm = utils.NormLayer(hidden_size)
+                # Self-Attention layer
+                self.attn_layer = attention.MultiHeadedAttentionLayer(
+                    attention_type,
+                    max_seq_length,
+                    num_attention_heads,
+                    attention_head_size,
+                    num_rand_blocks,
+                    seq_length,
+                    seq_length,
+                    block_size,
+                    block_size,
+                    attention_probs_dropout_prob,
+                    initializer_range,
+                    use_bias,
+                    seed,
+                    name="self",
+                )
+                # Feedforward layer
+                with tf.compat.v1.variable_scope("output"):
+                    self.projection_layer = utils.Dense3dProjLayer(
+                        num_attention_heads,
+                        attention_head_size,
+                        utils.create_initializer(initializer_range),
+                        None,
+                        "dense",
+                        use_bias,
+                    )
+                    # Dropout
+                    self.attention_dropout = recompute_grad.RecomputingDropout(hidden_dropout_prob)
+
+            with tf.compat.v1.variable_scope("intermediate"):
+                # Normalization layer
+                self.second_layer_norm = utils.NormLayer(hidden_size)
+                # Feedforward layer
+                self.expand_layer = utils.Dense2dLayer(
+                    hidden_size,
+                    intermediate_size,
+                    utils.create_initializer(initializer_range),
+                    intermediate_act_fn,
+                    "dense",
+                )
+            with tf.compat.v1.variable_scope("output"):
+                # Feedforward layer
+                self.contract_layer = utils.Dense2dLayer(
+                    intermediate_size,
+                    hidden_size,
+                    utils.create_initializer(initializer_range),
+                    None,
+                    "dense",
+                )
+                # Dropout
+                self.output_dropout = recompute_grad.RecomputingDropout(hidden_dropout_prob)
+
+    def call(
+        self,
+        layer_input,
+        attention_mask=None,
+        band_mask=None,
+        from_mask=None,
+        to_mask=None,
+        input_blocked_mask=None,
+        training=None,
+    ):
+        """Implements an encoder layer of a transformer in Pegasus style.
+
+        Args:
+          layer_input: float Tensor of shape [batch_size, seq_length, hidden_size].
+          attention_mask: (optional) float32 Tensor of shape [batch_size,
+            seq_length, seq_length]. The values should be 1 or 0. The
+            attention scores will effectively be set to -infinity for any positions
+            in the mask that are 0, and will be unchanged for positions that are 1.
+          band_mask: (optional) float32 Tensor of shape [batch_size, 1,
+            seq_length//block_size-4, block_size, 3*block_size].
+            The values should be 1 or 0. The attention scores will effectively be
+            set to -infinity for any positions in the mask that are 0, and will be
+            unchanged for positions that are 1.
+          from_mask: (optional) float32 Tensor of shape [batch_size, 1,
+            seq_length, 1]. The values should be 1 or 0. The
+            attention scores will effectively be set to -infinity for any positions
+            in the mask that are 0, and will be unchanged for positions that are 1.
+          to_mask: (optional) float32 Tensor of shape [batch_size, 1, 1,
+            seq_length]. The values should be 1 or 0. The
+            attention scores will effectively be set to -infinity for any positions
+            in the mask that are 0, and will be unchanged for positions that are 1.
+          input_blocked_mask: (optional) float32 Tensor of shape [batch_size,
+            seq_length//block_size, block_size]. Same as from/to_mask, just
+            reshaped.
+          training: Boolean indicating whether the call is training or inference.
+
+        Returns:
+          float Tensor of shape [batch_size, seq_length, hidden_size].
+
+        Raises:
+          ValueError: Any of the arguments or tensor shapes are invalid.
+          NotImplementedError: For unknown attention type.
+        """
+        # self-attention
+        normalized_layer_input = self.first_layer_norm(layer_input)
+        attention_output = self.attn_layer(
+            normalized_layer_input,
+            normalized_layer_input,
+            [
+                attention_mask,
+                band_mask,
+                from_mask,
+                to_mask,
+                input_blocked_mask,
+                input_blocked_mask,
+            ],
+            training=training,
+        )
+
+        # Run a linear projection of `hidden_size` then add a residual
+        # with `layer_input`.
+        attention_output = self.projection_layer(attention_output)
+        attention_output = self.attention_dropout(attention_output, training=training)
+        attention_output = attention_output + layer_input
+
+        # The activation is only applied to the "intermediate" hidden layer.
+        normalized_attention_output = self.second_layer_norm(attention_output)
+        intermediate_output = self.expand_layer(normalized_attention_output)
+
+        # Down-project back to `hidden_size` then add the residual.
+        layer_output = self.contract_layer(intermediate_output)
+        layer_output = self.output_dropout(layer_output, training=training)
+        layer_output = layer_output + attention_output
+        return layer_output
+
+
+class PostnormEncoderLayer(tf.keras.layers.Layer):
+    """Encoder layer of a transformer in BERT style.
+
+    The layer_norm is taken after self-attention.
+    """
+
+    def __init__(
+        self,
+        attention_type,
+        max_seq_length=4096,
+        hidden_size=768,
+        intermediate_size=3072,
+        intermediate_act_fn=utils.gelu,
+        attention_probs_dropout_prob=0.0,
+        hidden_dropout_prob=0.1,
+        initializer_range=0.02,
+        num_attention_heads=12,
+        num_rand_blocks=3,
+        seq_length=1024,
+        block_size=64,
+        use_bias=True,
+        seed=None,
+        name=None,
+    ):
+        """Constructor of an encoder layer of a transformer in BERT style.
+
+        Args:
+          attention_type: Type of attention, needs to be one of ['original_full',
+            'simulated_sparse', 'block_sparse'].
+          max_seq_length: (optional) Maximum sequence length of the model.
+          hidden_size: (optional) int. Size of hidden dimension.
+          intermediate_size: (optional) int. Size of intermediate dimension.
+          intermediate_act_fn: (optional) Activation function for intermediate layer.
+          attention_probs_dropout_prob: (optional) float. Dropout probability of the
+            attention probabilities.
+          hidden_dropout_prob: (optional) float. Dropout probability of the
+            attention.
+          initializer_range: (optional) float. Range of the weight initializer.
+          num_attention_heads: (optional) int. Number of attention heads.
+          num_rand_blocks: (optional) int. Number of random chunks per row.
+          seq_length: (optional) int. length of sequence.
+          block_size: (optional) int. size of block in sequence.
+          use_bias: (optional) bool. Whether key/query/value uses a bias vector.
+          seed: (optional) int. Random seed for generating random mask.
+          name: The name scope of this layer.
+        """
+        super(PostnormEncoderLayer, self).__init__(name=name)
+
+        with tf.compat.v1.variable_scope(name):
+
+            attention_head_size = hidden_size // num_attention_heads
+            with tf.compat.v1.variable_scope("attention"):
+                # Self-Attention layer
+                self.attn_layer = attention.MultiHeadedAttentionLayer(
+                    attention_type,
+                    max_seq_length,
+                    num_attention_heads,
+                    attention_head_size,
+                    num_rand_blocks,
+                    seq_length,
+                    seq_length,
+                    block_size,
+                    block_size,
+                    attention_probs_dropout_prob,
+                    initializer_range,
+                    use_bias,
+                    seed,
+                    name="self",
+                )
+
+                with tf.compat.v1.variable_scope("output"):
+                    # Feedforward layer
+                    self.projection_layer = utils.Dense3dProjLayer(
+                        num_attention_heads,
+                        attention_head_size,
+                        utils.create_initializer(initializer_range),
+                        None,
+                        "dense",
+                        use_bias,
+                    )
+                    # Post-Normalization layer
+                    self.first_layer_norm = utils.NormLayer(hidden_size)
+                    # Dropout
+                    self.attention_dropout = recompute_grad.RecomputingDropout(hidden_dropout_prob)
+
+            with tf.compat.v1.variable_scope("intermediate"):
+                # Feedforward layer
+                self.expand_layer = utils.Dense2dLayer(
+                    hidden_size,
+                    intermediate_size,
+                    utils.create_initializer(initializer_range),
+                    intermediate_act_fn,
+                    "dense",
+                )
+
+            with tf.compat.v1.variable_scope("output"):
+                # Feedforward layer
+                self.contract_layer = utils.Dense2dLayer(
+                    intermediate_size,
+                    hidden_size,
+                    utils.create_initializer(initializer_range),
+                    None,
+                    "dense",
+                )
+                # Normalization layer
+                self.second_layer_norm = utils.NormLayer(hidden_size)
+                # Dropout
+                self.output_dropout = recompute_grad.RecomputingDropout(hidden_dropout_prob)
+
+    def call(
+        self,
+        layer_input,
+        attention_mask=None,
+        band_mask=None,
+        from_mask=None,
+        to_mask=None,
+        input_blocked_mask=None,
+        training=None,
+    ):
+        """Implements an encoder layer of a transformer in BERT style.
+
+        Args:
+          layer_input: float Tensor of shape [batch_size, seq_length, hidden_size].
+          attention_mask: (optional) float32 Tensor of shape [batch_size,
+            seq_length, seq_length]. The values should be 1 or 0. The
+            attention scores will effectively be set to -infinity for any positions
+            in the mask that are 0, and will be unchanged for positions that are 1.
+          band_mask: (optional) float32 Tensor of shape [batch_size, 1,
+            seq_length//block_size-4, block_size, 3*block_size].
+            The values should be 1 or 0. The attention scores will effectively be
+            set to -infinity for any positions in the mask that are 0, and will be
+            unchanged for positions that are 1.
+          from_mask: (optional) float32 Tensor of shape [batch_size, 1,
+            seq_length, 1]. The values should be 1 or 0. The
+            attention scores will effectively be set to -infinity for any positions
+            in the mask that are 0, and will be unchanged for positions that are 1.
+          to_mask: (optional) float32 Tensor of shape [batch_size, 1, 1,
+            seq_length]. The values should be 1 or 0. The
+            attention scores will effectively be set to -infinity for any positions
+            in the mask that are 0, and will be unchanged for positions that are 1.
+          input_blocked_mask: (optional) float32 Tensor of shape [batch_size,
+            seq_length//block_size, block_size]. Same as from/to_mask, just
+            reshaped.
+          training: Boolean indicating whether the call is training or inference.
+
+        Returns:
+          float Tensor of shape [batch_size, seq_length, hidden_size].
+
+        Raises:
+          ValueError: Any of the arguments or tensor shapes are invalid.
+          NotImplementedError: For unknown attention type.
+        """
+        # self-attention
+        attention_output = self.attn_layer(
+            layer_input,
+            layer_input,
+            [
+                attention_mask,
+                band_mask,
+                from_mask,
+                to_mask,
+                input_blocked_mask,
+                input_blocked_mask,
+            ],
+            training=training,
+        )
+
+        # Run a linear projection of `hidden_size` then add a residual
+        # with `layer_input`.
+        attention_output = self.projection_layer(attention_output)
+        attention_output = self.attention_dropout(attention_output, training=training)
+        attention_output = self.first_layer_norm(attention_output + layer_input)
+
+        # The activation is only applied to the "intermediate" hidden layer.
+        intermediate_output = self.expand_layer(attention_output)
+
+        # Down-project back to `hidden_size` then add the residual.
+        layer_output = self.contract_layer(intermediate_output)
+        layer_output = self.output_dropout(layer_output, training=training)
+        layer_output = self.second_layer_norm(layer_output + attention_output)
+        return layer_output
+
+
+def add_gradient_recomputation(original_class):
+    """Creates a subclass which enables gradient checkpointing."""
+
+    class RecomputeLayer(original_class):
+        """Transformer layer that recomputes the forward pass during backprop."""
+
+        def call(
+            self,
+            layer_input,
+            attention_mask=None,
+            band_mask=None,
+            from_mask=None,
+            to_mask=None,
+            input_blocked_mask=None,
+            training=None,
+        ):
+            def f(
+                layer_input,
+                attention_mask,
+                band_mask,
+                from_mask,
+                to_mask,
+                input_blocked_mask,
+            ):
+                x = super(RecomputeLayer, self).call(
+                    layer_input,
+                    attention_mask,
+                    band_mask,
+                    from_mask,
+                    to_mask,
+                    input_blocked_mask,
+                    training=training,
+                )
+                return x
+
+            f = recompute_grad.recompute_grad(f)
+
+            return f(
+                layer_input,
+                attention_mask,
+                band_mask,
+                from_mask,
+                to_mask,
+                input_blocked_mask,
+            )
+
+    return RecomputeLayer
+
+
+class EncoderStack(tf.keras.layers.Layer):
+    """Transformer encoder stack."""
+
+    def __init__(self, params):
+        name = "encoder"
+        super(EncoderStack, self).__init__(name=name)
+        self.params = params
+
+        if params["norm_type"] == "prenorm":
+            encoder_class = PrenormEncoderLayer
+        elif params["norm_type"] == "postnorm":
+            encoder_class = PostnormEncoderLayer
+        else:
+            raise NotImplementedError("Norm type {} is not implemented".format(params["norm_type"]))
+
+        if params["use_gradient_checkpointing"]:
+            encoder_class = add_gradient_recomputation(encoder_class)
+
+        with tf.compat.v1.variable_scope(name):
+            # Encoder layers
+            self.encoder_layers = [
+                encoder_class(  # pylint: disable=g-complex-comprehension
+                    self.params["attention_type"],
+                    self.params["max_position_embeddings"],
+                    self.params["hidden_size"],
+                    self.params["intermediate_size"],
+                    utils.get_activation(self.params["hidden_act"]),
+                    self.params["attention_probs_dropout_prob"],
+                    self.params["hidden_dropout_prob"],
+                    self.params["initializer_range"],
+                    self.params["num_attention_heads"],
+                    self.params["num_rand_blocks"],
+                    self.params["max_encoder_length"],
+                    self.params["block_size"],
+                    self.params["use_bias"],
+                    seed=layer_idx,
+                    name="layer_%d" % layer_idx,
+                )
+                for layer_idx in range(self.params["num_hidden_layers"])
+            ]
+
+            # Normalization layer
+            self.layer_norm = utils.NormLayer(self.params["hidden_size"])
+
+    def call(self, encoder_inputs, encoder_inputs_mask, training=None):
+        """Return the output of the encoder layer stack.
+
+        Args:
+          encoder_inputs: tensor with shape
+            [batch_size, input_length, hidden_size]
+          encoder_inputs_mask: Mask for encoder input. [batch_size, input_length]
+          training: Boolean indicating whether the call is training or inference.
+
+        Returns:
+          Final layer encoder output. float tensor with shape
+            [batch_size, input_length, hidden_size]
+        """
+        if self.params["attention_type"] == "block_sparse":
+            # reshape and cast for blocking
+            encoder_length = self.params["max_encoder_length"]
+            encoder_block_size = self.params["block_size"]
+            encoder_inputs_mask = tf.cast(encoder_inputs_mask, tf.float32)
+            blocked_encoder_mask = tf.reshape(
+                encoder_inputs_mask,
+                (-1, encoder_length // encoder_block_size, encoder_block_size),
+            )
+            encoder_from_mask = tf.reshape(encoder_inputs_mask, (-1, 1, encoder_length, 1))
+            encoder_to_mask = tf.reshape(encoder_inputs_mask, (-1, 1, 1, encoder_length))
+
+            # create band padding
+            band_mask = attention.create_band_mask_from_inputs(blocked_encoder_mask, blocked_encoder_mask)
+
+            # For unused masks, use 0.0 instead of None for compatibility with recompute_grad
+            attention_mask = 0.0
+
+        else:
+            # For unused masks, use 0.0 instead of None for compatibility with recompute_grad
+            blocked_encoder_mask = 0.0
+            encoder_to_mask = 0.0
+            encoder_from_mask = 0.0
+            band_mask = 0.0
+
+            encoder_inputs_mask = tf.cast(encoder_inputs_mask, tf.float32)
+            attention_mask = attention.create_attention_mask_from_input_mask(encoder_inputs_mask, encoder_inputs_mask)
+
+        if self.params["norm_type"] == "postnorm":
+            encoder_inputs = self.layer_norm(encoder_inputs)
+
+        layer_output = encoder_inputs
+        for layer in self.encoder_layers:
+            layer_output = layer(
+                layer_output,
+                attention_mask,
+                band_mask,
+                encoder_from_mask,
+                encoder_to_mask,
+                blocked_encoder_mask,
+                training=training,
+            )
+
+        if self.params["norm_type"] == "prenorm":
+            layer_output = self.layer_norm(layer_output)
+
+        return layer_output
diff --git a/pretrain/kobigbird/flags.py b/pretrain/kobigbird/flags.py
new file mode 100644
index 0000000..5592cf4
--- /dev/null
+++ b/pretrain/kobigbird/flags.py
@@ -0,0 +1,310 @@
+# Copyright 2021 The BigBird Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Common flag definitions."""
+
+import json
+
+import tensorflow.compat.v2 as tf
+from absl import flags, logging
+from transformers import AutoTokenizer
+
+############################### FLAGS UTILS ####################################
+
+FLAGS = flags.FLAGS
+DEFINE_bool = flags.DEFINE_bool
+DEFINE_enum = flags.DEFINE_enum
+DEFINE_float = flags.DEFINE_float
+DEFINE_integer = flags.DEFINE_integer
+DEFINE_string = flags.DEFINE_string
+
+
+# Flag names are globally defined! So in general, we need to be
+# careful to pick names that are unlikely to be used by other libraries.
+# If there is a conflict, we'll get an error at import time.
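+#
+# Illustrative invocation (flag values are hypothetical, not the defaults):
+#   python run_pretraining.py --attention_type=block_sparse \
+#     --norm_type=postnorm --block_size=32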
+
+# Basic model config flags
+
+flags.DEFINE_float(
+    "attention_probs_dropout_prob",
+    0.1,
+    "The dropout probability for attention coefficients when using original.",
+)
+flags.DEFINE_string(
+    "hidden_act",
+    "gelu",
+    "The non-linear activation function (function or string) in the encoder " "and pooler.",
+)
+flags.DEFINE_float(
+    "hidden_dropout_prob",
+    0.1,
+    "The dropout probability for all fully connected layers in the embeddings, " "encoder, decoder, and pooler.",
+)
+flags.DEFINE_integer("hidden_size", 768, "Size of the transformer layers and the pooler layer.")
+flags.DEFINE_float(
+    "initializer_range",
+    0.02,
+    "The stdev of the truncated_normal_initializer for initializing all " "weight matrices.",
+)
+flags.DEFINE_integer(
+    "intermediate_size",
+    3072,
+    "The size of intermediate (i.e. feed-forward) layer in the Transformer.",
+)
+flags.DEFINE_integer(
+    "max_position_embeddings",
+    4096,
+    "The size of the position embeddings matrix, which dictates the maximum "
+    "length for which the model can be run.",
+)
+flags.DEFINE_integer(
+    "num_attention_heads",
+    12,
+    "Number of attention heads for each attention layer in the Transformer.",
+)
+flags.DEFINE_integer(
+    "num_hidden_layers",
+    12,
+    "Number of hidden layers in the model (same for encoder and decoder).",
+)
+flags.DEFINE_bool("use_token_type", True, "Whether to use token type embedding (True for BERT, False for RoBERTa).")
+flags.DEFINE_integer("type_vocab_size", 2, "The vocabulary size of the `token_type_ids`.")
+flags.DEFINE_bool("use_bias", True, "Whether to use bias for key/query/value.")
+flags.DEFINE_bool(
+    "rescale_embedding",
+    False,
+    "Whether to rescale word embedding by hidden dimensions.",
+)
+flags.DEFINE_bool(
+    "use_gradient_checkpointing",
+    False,
+    "Whether to recompute encoder fwd pass during back prop for saving memory.",
+)
+flags.DEFINE_string("scope", "bert", "Variable scope name.")
+
+# NOTE Changed for BERT Wordpiece Tokenizer
+flags.DEFINE_string(
+    "tokenizer_dir",
+    "tokenizer",
+    "Tokenizer directory. We will use a BERT Wordpiece Tokenizer which is compatible with the transformers library.",
+)
+
+# Simulated and Block attention settings
+
+flags.DEFINE_enum(
+    "attention_type",
+    "block_sparse",
+    ["original_full", "simulated_sparse", "block_sparse"],
+    "Selecting attention implementation. "
+    "'original_full': full attention from original bert. "
+    "'simulated_sparse': simulated sparse attention. "
+    "'block_sparse': blocked implementation of sparse attention.",
+)
+flags.DEFINE_enum(
+    "norm_type",
+    "postnorm",
+    ["prenorm", "postnorm"],
+    "Selecting when to apply layer-norm. "
+    "'prenorm': Before attention layer, e.g. Pegasus. "
+    "'postnorm': After attention layer, e.g. Bert.",
+)
+flags.DEFINE_integer("block_size", 16, "The block size for the attention mask.")
+flags.DEFINE_integer("num_rand_blocks", 3, "Number of random blocks per row.")
+
+# Adaptive optimizer configs
+
+flags.DEFINE_float("weight_decay_rate", 0.01, "L2 penalty as weight decay to be used.")
+
+flags.DEFINE_float("optimizer_beta1", 0.9, "The exponential decay rate for the 1st moment estimates.")
+
+flags.DEFINE_float("optimizer_beta2", 0.999, "The exponential decay rate for the 2nd moment estimates.")
+
+flags.DEFINE_float("optimizer_epsilon", 1e-6, "Adaptivity trade-off parameter.")
+
+# TPU settings
+
+flags.DEFINE_bool("use_tpu", False, "Whether to use TPU or GPU/CPU.")
+
+flags.DEFINE_string(
+    "tpu_name",
+    None,
+    "The Cloud TPU to use for training. This should be either the name "
+    "used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 "
+    "url.",
+)
+
+flags.DEFINE_string(
+    "tpu_zone",
+    None,
+    "[Optional] GCE zone where the Cloud TPU is located in. If not "
+    "specified, we will attempt to automatically detect the GCE project from "
+    "metadata.",
+)
+
+flags.DEFINE_string("tpu_job_name", None, "Name of TPU worker, if anything other than 'tpu_worker'.")
+
+flags.DEFINE_string(
+    "gcp_project",
+    None,
+    "[Optional] Project name for the Cloud TPU-enabled project. If not "
+    "specified, we will attempt to automatically detect the GCE project from "
+    "metadata.",
+)
+
+flags.DEFINE_string("master", None, "[Optional] TensorFlow master URL.")
+
+flags.DEFINE_integer(
+    "num_tpu_cores",
+    8,
+    "Only used if `use_tpu` is True. Total number of TPU cores to use.",
+)
+
+flags.DEFINE_integer("iterations_per_loop", 200, "How many steps to make in each estimator call.")
+
+
+def as_dictionary():
+    """Get current config from flags."""
+
+    config = {
+        # transformer basic configs
+        "attention_probs_dropout_prob": FLAGS.attention_probs_dropout_prob,
+        "hidden_act": FLAGS.hidden_act,
+        "hidden_dropout_prob": FLAGS.hidden_dropout_prob,
+        "hidden_size": FLAGS.hidden_size,
+        "initializer_range": FLAGS.initializer_range,
+        "intermediate_size": FLAGS.intermediate_size,
+        "max_position_embeddings": FLAGS.max_position_embeddings,
+        "num_attention_heads": FLAGS.num_attention_heads,
+        "num_hidden_layers": FLAGS.num_hidden_layers,
+        "type_vocab_size": FLAGS.type_vocab_size,
+        "scope": FLAGS.scope,
+        "use_bias": FLAGS.use_bias,
+        "rescale_embedding": FLAGS.rescale_embedding,
+        "use_gradient_checkpointing": FLAGS.use_gradient_checkpointing,
+        "tokenizer_dir": FLAGS.tokenizer_dir,
+        "use_token_type": FLAGS.use_token_type,
+        "random_pos_emb": FLAGS.random_pos_emb,
+        # sparse mask configs
+        "attention_type": FLAGS.attention_type,
+        "norm_type": FLAGS.norm_type,
+        "block_size": FLAGS.block_size,
+        "num_rand_blocks": FLAGS.num_rand_blocks,
+        # common bert configs
+        "data_dir": FLAGS.data_dir,
+        "output_dir": FLAGS.output_dir,
+        "init_checkpoint": FLAGS.init_checkpoint,
+        "max_encoder_length": FLAGS.max_encoder_length,
+        "do_train": FLAGS.do_train,
+        "do_eval": FLAGS.do_eval,
+        "train_batch_size": FLAGS.train_batch_size,
+        "eval_batch_size": FLAGS.eval_batch_size,
+        "optimizer": FLAGS.optimizer,
+        "learning_rate": FLAGS.learning_rate,
+        "num_train_steps": FLAGS.num_train_steps,
+        "num_warmup_steps": FLAGS.num_warmup_steps,
+        "save_checkpoints_steps": FLAGS.save_checkpoints_steps,
+        "weight_decay_rate": FLAGS.weight_decay_rate,
+        "optimizer_beta1": FLAGS.optimizer_beta1,
+        "optimizer_beta2": FLAGS.optimizer_beta2,
+        "optimizer_epsilon": FLAGS.optimizer_epsilon,
+        # TPU settings
+        "use_tpu": FLAGS.use_tpu,
+        "tpu_name": FLAGS.tpu_name,
+        "tpu_zone": FLAGS.tpu_zone,
+        "tpu_job_name": FLAGS.tpu_job_name,
+        "gcp_project": FLAGS.gcp_project,
+        "master": FLAGS.master,
+        "num_tpu_cores": FLAGS.num_tpu_cores,
+        "iterations_per_loop": FLAGS.iterations_per_loop,
+    }
+
+    # pretraining dedicated flags
+    if hasattr(FLAGS, "max_predictions_per_seq"):
+        config["max_predictions_per_seq"] = FLAGS.max_predictions_per_seq
+    if hasattr(FLAGS, "masked_lm_prob"):
+        config["masked_lm_prob"] = FLAGS.masked_lm_prob
+    if hasattr(FLAGS, "max_eval_steps"):
+        config["max_eval_steps"] = FLAGS.max_eval_steps
+    if hasattr(FLAGS, "use_nsp"):
+        config["use_nsp"] = FLAGS.use_nsp
+    if hasattr(FLAGS, "keep_checkpoint_max"):
+        config["keep_checkpoint_max"] = FLAGS.keep_checkpoint_max
+    if hasattr(FLAGS, "seed"):
+        config["seed"] = FLAGS.seed
+
+    # classifier dedicated flags
+    if hasattr(FLAGS, "num_labels"):
+        config["num_labels"] = FLAGS.num_labels
+
+    # summarization dedicated flags
+    if hasattr(FLAGS, "max_decoder_length"):
+        config["max_decoder_length"] = FLAGS.max_decoder_length
+    if hasattr(FLAGS, "trainable_bias"):
+        config["trainable_bias"] = FLAGS.trainable_bias
+    if hasattr(FLAGS, "couple_encoder_decoder"):
+        config["couple_encoder_decoder"] = FLAGS.couple_encoder_decoder
+    if hasattr(FLAGS, "beam_size"):
+        config["beam_size"] = FLAGS.beam_size
+    if hasattr(FLAGS, "alpha"):
+        config["alpha"] = FLAGS.alpha
+    if hasattr(FLAGS, "label_smoothing"):
+        config["label_smoothing"] = FLAGS.label_smoothing
+    if hasattr(FLAGS, "do_mlm"):
+        config["do_mlm"] = FLAGS.do_mlm
+
+    # Vocab info
+    tokenizer = AutoTokenizer.from_pretrained(FLAGS.tokenizer_dir)
+    config["vocab_size"] = tokenizer.vocab_size
+    config["pad_token_id"] = tokenizer.pad_token_id
+    config["unk_token_id"] = tokenizer.unk_token_id
+    config["cls_token_id"] = tokenizer.cls_token_id
+    config["sep_token_id"] = tokenizer.sep_token_id
+    config["mask_token_id"] = tokenizer.mask_token_id
+    config["bos_token_id"] = tokenizer.bos_token_id
+    config["eos_token_id"] = tokenizer.eos_token_id
+
+    return config
+
+
+def save(path):
+    """Save current flag config."""
+    config = as_dictionary()
+    with tf.io.gfile.GFile(path, "w") as f:
+        json.dump(config, f, indent=4, sort_keys=True)
+
+    # log flags
+    max_len = max([len(ii) for ii in config.keys()])
+    fmt_string = "\t%" + str(max_len) + "s : %s"
+    logging.info("Arguments:")
+    for key, value in sorted(config.items()):
+        logging.info(fmt_string, key, value)
+
+    return config
+
+
+def load(path):
+    """Set flags from a saved config."""
+
+    with tf.io.gfile.GFile(path) as f:
+        config = json.load(f)
+
+    # log and set flags
+    max_len = max([len(ii) for ii in config.keys()])
+    fmt_string = "\t%" + str(max_len) + "s : %s"
+    logging.info("Arguments:")
+    for key, value in config.items():
+        if hasattr(FLAGS, key):
+            logging.info(fmt_string, key, value)
+            setattr(FLAGS, key, value)
+
+    return config
diff --git a/pretrain/kobigbird/modeling.py b/pretrain/kobigbird/modeling.py
new file mode 100644
index 0000000..ef206b6
--- /dev/null
+++ b/pretrain/kobigbird/modeling.py
@@ -0,0 +1,470 @@
+# Copyright 2021 The BigBird Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""The main BigBird model and related functions."""
+
+import copy
+
+import tensorflow.compat.v2 as tf
+from absl import logging
+from kobigbird import decoder, encoder, utils
+
+
+class BertModel(tf.keras.layers.Layer):
+    """BERT model ("Bidirectional Encoder Representations from Transformers").
+
+    Example usage:
+
+    ```python
+    # Already been converted into SentencePiece token ids
+    input_ids = tf.constant([[31, 51, 99], [15, 5, 0]])
+    token_type_ids = tf.constant([[0, 0, 1], [0, 2, 0]])
+
+    params = utils.BigBirdConfig(vocab_size=32000, hidden_size=512,
+        num_hidden_layers=8, num_attention_heads=6, intermediate_size=1024)
+
+    model = modeling.BertModel(params, train=True)
+
+    _, pooled_output = model(input_ids=input_ids, token_type_ids=token_type_ids)
+
+    label_embeddings = tf.get_variable(...)
+    logits = tf.matmul(pooled_output, label_embeddings)
+    ...
+    ```
+    """
+
+    def __init__(self, params):
+        """Constructor for BertModel.
+
+        Args:
+          params: `BigBirdConfig` dictionary.
+        """
+        self.params = copy.deepcopy(params)
+        self.scope = params["scope"]
+        super(BertModel, self).__init__(name=self.scope)
+
+        # validate params
+        self.pad = lambda x: x
+        if params["max_encoder_length"] <= 512:
+            logging.info("Switching to full attention for short sequences")
+            self.params["attention_type"] = "original_full"
+        if self.params["attention_type"] == "simulated_sparse" or self.params["attention_type"] == "block_sparse":
+            if params["max_encoder_length"] % params["block_size"]:
+                logging.info("Expand max_encoder_length to next multiple of block_size")
+                self.params["max_encoder_length"] = (params["max_encoder_length"] // params["block_size"] + 1) * params[
+                    "block_size"
+                ]
+            pad_size = self.params["max_encoder_length"] - params["max_encoder_length"]
+            paddings = [[0, 0], [0, pad_size]]
+            self.pad = lambda x: tf.pad(x, paddings)
+
+        with tf.compat.v1.variable_scope(self.scope, reuse=tf.compat.v1.AUTO_REUSE):
+            self.embeder = utils.EmbeddingLayer(
+                vocab_size=self.params["vocab_size"],
+                emb_dim=self.params["hidden_size"],
+                initializer=utils.create_initializer(self.params["initializer_range"]),
+                scale_emb=self.params["rescale_embedding"],
+                use_token_type=self.params["use_token_type"],
+                num_token_types=self.params["type_vocab_size"],
+                use_position_embeddings=True,
+                max_position_embeddings=self.params["max_position_embeddings"],
+                dropout_prob=self.params["hidden_dropout_prob"],
+            )
+            self.encoder = encoder.EncoderStack(self.params)
+            self.pooler = utils.SimpleDenseLayer(
+                input_size=self.params["hidden_size"],
+                output_size=self.params["hidden_size"],
+                initializer=utils.create_initializer(self.params["initializer_range"]),
+                activation=tf.tanh,
+                name="pooler/dense",
+            )
+
+    def call(self, input_ids, token_type_ids=None, training=None):
+        """Forward pass of BertModel.
+
+        Args:
+          input_ids: int32 Tensor of shape [batch_size, seq_length].
+          token_type_ids: (optional) int32 Tensor of shape [batch_size, seq_length].
+          training: Boolean indicating whether the call is training or inference.
+
+        Returns:
+          sequence_output: Tensor of shape [batch_size, seq_length, hidden_size]
+          pooled_output: Tensor of shape [batch_size, hidden_size]
+
+        Raises:
+          ValueError: The config is invalid or one of the input tensor shapes
+            is invalid.
+        """
+        # pad if needed
+        input_ids = self.pad(input_ids)
+
+        if token_type_ids is None:
+            token_type_ids = tf.zeros_like(input_ids, dtype=tf.int32)
+        else:
+            token_type_ids = self.pad(token_type_ids)
+
+        # Perform embedding lookup on the word ids.
+        embedding_output = self.embeder(
+            input_ids,
+            self.params["max_encoder_length"],
+            token_type_ids=token_type_ids,
+            training=training,
+        )
+
+        # Generate mask.
+        input_mask = tf.where(
+            input_ids != self.params["pad_token_id"], tf.ones_like(input_ids), tf.zeros_like(input_ids)
+        )
+
+        # Run the stacked transformer.
+        sequence_output = self.encoder(embedding_output, input_mask, training)
+
+        # The "pooler" converts the encoded sequence tensor of shape
+        # [batch_size, seq_length, hidden_size] to a tensor of shape
+        # [batch_size, hidden_size]. This is necessary for segment-level
+        # (or segment-pair-level) classification tasks where we need a fixed
+        # dimensional representation of the segment.
+        first_token_tensor = sequence_output[:, 0, :]
+        # We "pool" the model by simply taking the hidden state corresponding
+        # to the first token. We assume that this has been pre-trained.
+        pooled_output = self.pooler(first_token_tensor)
+
+        return sequence_output, pooled_output
+
+
+class TransformerModel(tf.keras.layers.Layer):
+    """Encoder-Decoder transformer model.
+
+    Example usage:
+
+    ```python
+    # Already been converted into SentencePiece token ids
+    input_ids = tf.constant([[31, 51, 99], [15, 5, 0]])
+    target_ids = tf.constant([[43, 76, 38], [56, 8, 0]])
+
+    params = utils.BigBirdConfig(vocab_size=32000, hidden_size=512,
+        num_hidden_layers=8, num_attention_heads=6, intermediate_size=1024)
+
+    model = modeling.TransformerModel(params, train=True)
+
+    predictions, _ = model(input_ids=input_ids, target_ids=target_ids)
+
+    log_probs, logits, pred_ids = predictions
+    ...
+    ```
+    """
+
+    def __init__(self, params):
+        """Constructor for TransformerModel.
+
+        Args:
+          params: `BigBirdConfig` dictionary.
+        """
+        self.params = copy.deepcopy(params)
+        self.scope = params["scope"]
+        super(TransformerModel, self).__init__(name=self.scope)
+
+        # validate params
+        self.pad = lambda x: x
+        if params["max_encoder_length"] <= 512:
+            logging.info("Switching to full attention for short sequences")
+            self.params["attention_type"] = "original_full"
+        if self.params["attention_type"] == "simulated_sparse" or self.params["attention_type"] == "block_sparse":
+            if params["max_encoder_length"] % params["block_size"]:
+                logging.info("Expand max_encoder_length to next multiple of block_size")
+                self.params["max_encoder_length"] = (params["max_encoder_length"] // params["block_size"] + 1) * params[
+                    "block_size"
+                ]
+            pad_size = self.params["max_encoder_length"] - params["max_encoder_length"]
+            paddings = [[0, 0], [0, pad_size]]
+            self.pad = lambda x: tf.pad(x, paddings)
+
+        with tf.compat.v1.variable_scope(self.scope, reuse=tf.compat.v1.AUTO_REUSE):
+            self.embeder = utils.EmbeddingLayer(
+                vocab_size=self.params["vocab_size"],
+                emb_dim=self.params["hidden_size"],
+                initializer=utils.create_initializer(self.params["initializer_range"]),
+                scale_emb=self.params["rescale_embedding"],
+                use_token_type=self.params["use_token_type"],
+                num_token_types=self.params["type_vocab_size"],
+                use_position_embeddings=True,
+                max_position_embeddings=self.params["max_position_embeddings"],
+                dropout_prob=self.params["hidden_dropout_prob"],
+            )
+            self.encoder = encoder.EncoderStack(self.params)
+            self.decoder = decoder.DecoderStack(self.params)
+
+    def _encode(self, input_ids, training=None):
+        """Generate continuous representation for ids.
+
+        Args:
+          input_ids: Int tensor with shape [batch_size, input_length].
+          training: Boolean indicating whether the call is training or inference.
+
+        Returns:
+          A float tensor of shape
+            [batch_size, input_length, hidden_size].
+        """
+        # pad if needed
+        input_ids = self.pad(input_ids)
+
+        # Perform embedding lookup on the word ids.
+        input_embs = self.embeder(input_ids, self.params["max_encoder_length"], training=training)
+
+        # Generate mask.
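+        # (1 marks a real token and 0 a pad position; note this path treats
+        # ids greater than pad_token_id as real, unlike BertModel's `!=`
+        # comparison above.)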
+ input_mask = tf.where( + input_ids > self.params["pad_token_id"], tf.ones_like(input_ids), tf.zeros_like(input_ids) + ) + + # Run the stacked transformer. + encoder_output = self.encoder(input_embs, input_mask, training=training) + + return encoder_output, input_mask + + def _get_start_token_ids(self, tensor_for_shape): + batch_size = utils.get_shape_list(tensor_for_shape)[0] + return tf.ones([batch_size], dtype=tf.int32) * self.params["bos_token_id"] + + def get_inputs_from_targets(self, targets, start_token_ids): + """Converts target ids to input ids, i.e. adds and removes last.""" + length = tf.math.count_nonzero(targets, axis=1, dtype=tf.int32) + # Add start token ids. + inputs = tf.concat([tf.expand_dims(start_token_ids, axis=1), targets], 1) + # Remove from the input. + mask = tf.sequence_mask(length, self.params["max_decoder_length"] + 1, dtype=tf.int32) + inputs = (mask * inputs)[:, :-1] + return inputs + + def _decode( + self, + target_ids, + target_mask, + start_token_ids, + encoder_output, + encoder_mask, + training=None, + ): + """Compute likelihood of target tokens under the model. + + Args: + target_ids: tensor with shape [batch_size, target_length, hidden_size] + target_mask: self-attention bias for decoder attention layer. [batch_size, + input_length] + start_token_ids: int32 tensor of shape [batch_size] for first decoder + input. + encoder_output: Continuous representation of input sequence. Float tensor + with shape [batch_size, input_length, hidden_size]. + encoder_mask: Float tensor with shape [batch_size, input_length]. + training: Boolean indicating whether the call is training or inference. + + Returns: + A dict containing the output ids, the output log-probs, the output logits. + """ + + # Prepare inputs to decoder layers by shifting targets, embedding ids, + # adding positional encoding and applying dropout. + input_ids = self.get_inputs_from_targets(target_ids, start_token_ids) + + input_embs = self.embeder(input_ids, self.params["max_decoder_length"], training=training) + + outputs = self.decoder(input_embs, target_mask, encoder_output, encoder_mask, training=training) + + logits = self.embeder.linear(outputs) + output_ids = tf.cast(tf.argmax(logits, axis=-1), tf.int32) + + log_probs = -tf.nn.sparse_softmax_cross_entropy_with_logits(labels=target_ids, logits=logits) + log_probs = tf.where(target_ids > self.params["pad_token_id"], log_probs, tf.zeros_like(log_probs, tf.float32)) + + return ( + tf.identity(log_probs, name="log_probs"), + tf.identity(logits, name="logits"), + tf.cast(output_ids, tf.int32, name="pred_ids"), + ) + + def _init_cache(self, batch_size): + """Initialize cache for decoding.""" + + max_decode_len = self.params["max_decoder_length"] + num_heads = self.params["num_attention_heads"] + head_size = int(self.params["hidden_size"] / num_heads) + + cache = {} + for layer in range(self.params["num_hidden_layers"]): + cache["layer_%d" % layer] = { + "k": tf.zeros([batch_size, num_heads, max_decode_len, head_size]), + "v": tf.zeros([batch_size, num_heads, max_decode_len, head_size]), + } + return cache + + def _get_symbols_to_logits_fn(self, decoder_self_attention_mask): + """Returns a decoding function that calculates logits of the next tokens.""" + + max_decode_len = self.params["max_decoder_length"] + + def _symbols_to_logits_fn(target_ids, cache, i): + """Generate logits for next candidate IDs. + + Args: + target_ids: Current decoded sequences. 
int tensor with shape + [batch_size, i + 1] + cache: dictionary of values storing the encoder output, encoder-decoder + attention bias, and previous decoder attention values. + i: Loop index + + Returns: + Tuple of + (logits with shape [batch_size * beam_size, vocab_size], + updated cache values) + """ + decoder_input = tf.slice( + target_ids, + [0, tf.maximum(tf.cast(0, i.dtype), i - 1)], + [target_ids.shape[0], 1], + ) + self_attention_mask = tf.slice(decoder_self_attention_mask, [0, 0, i, 0], [1, 1, 1, max_decode_len]) + + # Preprocess decoder input by getting embeddings and adding timing signal. + decoder_input = self.embeder(decoder_input, 1, start_pos=i, training=False) + + decoder_output = self.decoder( + decoder_input, + self_attention_mask, + cache.get("encoder_output"), + cache.get("encoder_mask"), + cache=cache, + decode_i=i, + training=False, + ) + + logits = self.embeder.linear(decoder_output) + logits = tf.squeeze(logits, axis=[1]) + + return logits + + return _symbols_to_logits_fn + + def _predict(self, target_ids, target_mask, start_token_ids, encoder_output, encoder_mask): + """Beam decode output tokens and probabilities. + + Args: + target_ids: tensor with shape [batch_size, target_length, hidden_size] + target_mask: self-attention bias for decoder attention layer. [batch_size, + input_length] + start_token_ids: int32 tensor of shape [batch_size] for first decoder + input. + encoder_output: Continuous representation of input sequence. Float + tensor with shape [batch_size, target_length, num_hidden_layers, + hidden_size] + encoder_mask: bias for encoder-decoder attention layer. [batch_size, + input_length] + + Returns: + A tuple of: + `log_probs`: Log-probs of output tokens. + `logits`: Logits of output tokens. + `pred_ids`: Predicted output sequence. + """ + batch_size = utils.get_shape_list(start_token_ids)[0] + end_token_id = self.params["eos_token_id"] + + # One step logit function. + symbols_to_logits_fn = self._get_symbols_to_logits_fn(target_mask) + + # Create cache storing decoder attention values for each layer. + cache = self._init_cache(batch_size) + + if encoder_output is not None: + # Add encoder output and attention bias to the cache. + cache["encoder_output"] = encoder_output + cache["encoder_mask"] = encoder_mask + + decoded_ids = decoder.left2right_decode( + symbols_to_logits_fn, + start_token_ids, + cache, + batch_size, + self.params["max_decoder_length"], + vocab_size=self.params["vocab_size"], + beam_size=self.params["beam_size"], + beam_start=5, + beam_alpha=self.params["alpha"], + beam_min=0, + beam_max=-1, + eos_id=end_token_id, + ) + + # Get the top sequence for each batch element + output_ids = tf.cast(decoded_ids, tf.int32, name="pred_ids") + + # Calculate log probs for given sequence if available. + calc_ids = output_ids if target_ids is None else target_ids + output_log_probs, output_logits, _ = self._decode( + calc_ids, + target_mask, + start_token_ids, + encoder_output, + encoder_mask, + training=False, + ) + + return (output_log_probs, output_logits, output_ids) + + def _decode_and_predict(self, target_ids, encoder_output, encoder_mask, training=None): + """Decodes a sequence given the input and the encoder. + + Args: + target_ids: tensor with shape [batch_size, target_length, hidden_size] + encoder_output: Continuous representation of input sequence. Float + tensor with shape [batch_size, target_length, num_hidden_layers, + hidden_size] + encoder_mask: bias for encoder-decoder attention layer. 
[batch_size, + input_length] + training: Boolean indicating whether the call is training or inference. + + Returns: + A tuple of: + `log_probs`: Log-probs of output tokens. + `logits`: Logits of output tokens. + `pred_ids`: Predicted output sequence. + """ + # Create initial set of IDs that will be passed into symbols_to_logits_fn. + start_token_ids = self._get_start_token_ids(encoder_output) + + # Create causal self-attention mask for decoder. + target_mask = decoder.create_self_attention_mask(self.params["max_decoder_length"]) + + predictions = {} + if training: + predictions = self._decode( + target_ids, + target_mask, + start_token_ids, + encoder_output, + encoder_mask, + training=True, + ) + else: + predictions = self._predict(target_ids, target_mask, start_token_ids, encoder_output, encoder_mask) + + return predictions + + def call(self, input_ids, target_ids=None, training=None): + # Run the inputs through the encoder layer to map the symbol + # representations to continuous representations. + encoder_output, encoder_mask = self._encode(input_ids, training=training) + + # Decode. + predictions = self._decode_and_predict(target_ids, encoder_output, encoder_mask, training=training) + + return predictions, encoder_output diff --git a/pretrain/kobigbird/optimization.py b/pretrain/kobigbird/optimization.py new file mode 100644 index 0000000..0eb8fa7 --- /dev/null +++ b/pretrain/kobigbird/optimization.py @@ -0,0 +1,273 @@ +# Copyright 2021 The BigBird Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Functions and classes related to optimization (weight updates).""" + +import re + +import tensorflow.compat.v2 as tf +from absl import logging + +# pylint: disable=g-direct-tensorflow-import +from tensorflow.python.ops import resource_variable_ops + + +def get_linear_warmup_linear_decay_lr(init_lr, num_train_steps, num_warmup_steps): + """Calculate learning rate with linear warmup and linear decay.""" + global_step = tf.compat.v1.train.get_or_create_global_step() + + learning_rate = tf.constant(value=init_lr, shape=[], dtype=tf.float32) + + # Implements linear decay of the learning rate. + learning_rate = tf.compat.v1.train.polynomial_decay( + learning_rate, + global_step, + num_train_steps, + end_learning_rate=0.0, + power=1.0, + cycle=False, + ) + + # Implements linear warmup. I.e., if global_step < num_warmup_steps, the + # learning rate will be `global_step/num_warmup_steps * init_lr`. 
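+    # Illustrative numbers (hypothetical, not defaults): with init_lr=1e-4 and
+    # num_warmup_steps=10000, step 1000 uses lr = 1e-4 * 1000/10000 = 1e-5;
+    # after warmup the linear decay above takes over.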
+    if num_warmup_steps:
+        global_steps_int = tf.cast(global_step, tf.int32)
+        warmup_steps_int = tf.constant(num_warmup_steps, dtype=tf.int32)
+
+        global_steps_float = tf.cast(global_step, tf.float32)
+        warmup_steps_float = tf.cast(num_warmup_steps, tf.float32)
+
+        warmup_percent_done = global_steps_float / warmup_steps_float
+        warmup_learning_rate = init_lr * warmup_percent_done
+
+        is_warmup = tf.cast(global_steps_int < warmup_steps_int, tf.float32)
+        learning_rate = (1.0 - is_warmup) * learning_rate + is_warmup * warmup_learning_rate
+
+    return learning_rate
+
+
+def get_linear_warmup_rsqrt_decay_lr(init_lr, hidden_size, num_warmup_steps):
+    """Calculate learning rate with linear warmup and rsqrt decay."""
+    num_warmup_steps = tf.cast(num_warmup_steps, tf.float32)
+    global_step = tf.compat.v1.train.get_or_create_global_step()
+    global_step = tf.cast(global_step, tf.float32)
+
+    learning_rate = tf.constant(value=init_lr, shape=[], dtype=tf.float32)
+    learning_rate *= tf.math.rsqrt(tf.cast(hidden_size, tf.float32))
+    # Apply linear warmup
+    learning_rate *= tf.minimum(1.0, global_step / num_warmup_steps)
+    # Apply rsqrt decay
+    learning_rate *= tf.math.rsqrt(tf.maximum(global_step, num_warmup_steps))
+
+    return learning_rate
+
+
+def get_optimizer(params, learning_rate):
+    """Gets the optimizer based on the hparams and current mode (TPU vs. CPU/GPU).
+
+    Args:
+      params: A dictionary containing training hyperparameters.
+      learning_rate: A float32 scalar.
+
+    Returns:
+      A string or an optimizer instance.
+    """
+    optimizer = None
+
+    if params["optimizer"] == "Adafactor":
+        try:
+            from tensor2tensor.utils import adafactor  # pylint: disable=g-import-not-at-top
+
+            optimizer = adafactor.AdafactorOptimizer(learning_rate=learning_rate)
+        except ImportError:
+            logging.error("tensor2tensor not installed. Cannot use Adafactor. " "Defaulting to Adam.")
+            params["optimizer"] = "Adam"
+
+    if params["optimizer"] == "Adam":
+        optimizer = tf.compat.v1.train.AdamOptimizer(
+            learning_rate,
+            beta1=params["optimizer_beta1"],
+            beta2=params["optimizer_beta2"],
+            epsilon=params["optimizer_epsilon"],
+        )
+
+    if params["optimizer"] == "AdamWeightDecay":
+        optimizer = AdamWeightDecayOptimizer(
+            learning_rate,
+            weight_decay_rate=params["weight_decay_rate"],
+            beta_1=params["optimizer_beta1"],
+            beta_2=params["optimizer_beta2"],
+            epsilon=params["optimizer_epsilon"],
+            exclude_from_weight_decay=["LayerNorm", "layer_norm", "bias"],
+        )
+
+    if params["optimizer"] == "SGD":
+        optimizer = tf.compat.v1.train.GradientDescentOptimizer(learning_rate)
+
+    if optimizer is None:
+        raise ValueError("Unknown optimizer: {}.".format(params["optimizer"]))
+
+    if params["use_tpu"]:
+        # Average the gradients across TPU cores.
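+        # (CrossShardOptimizer wraps the base optimizer so each gradient is
+        # aggregated across the TPU shards before the update is applied.)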
+        optimizer = tf.compat.v1.tpu.CrossShardOptimizer(optimizer)
+
+    return optimizer
+
+
+class AdamWeightDecayOptimizer(tf.compat.v1.train.Optimizer):
+    """A basic Adam optimizer that includes "correct" L2 weight decay."""
+
+    def __init__(
+        self,
+        learning_rate,
+        weight_decay_rate=0.0,
+        beta_1=0.9,
+        beta_2=0.999,
+        epsilon=1e-6,
+        exclude_from_weight_decay=None,
+        name="AdamWeightDecayOptimizer",
+    ):
+        """Constructs an AdamWeightDecayOptimizer."""
+        super(AdamWeightDecayOptimizer, self).__init__(False, name)
+
+        self.learning_rate = learning_rate
+        self.weight_decay_rate = weight_decay_rate
+        self.beta_1 = beta_1
+        self.beta_2 = beta_2
+        self.epsilon = epsilon
+        self.exclude_from_weight_decay = exclude_from_weight_decay
+
+    def _create_slots(self, var_list):
+        # Create slots for the first and second moments.
+        for v in var_list:
+            self._zeros_slot(v, "m", self._name)
+            self._zeros_slot(v, "v", self._name)
+
+    def _apply_dense(self, grad, var):
+        param_name = self._get_variable_name(var.name)
+        m = self.get_slot(var, "m")
+        v = self.get_slot(var, "v")
+
+        # Standard Adam update.
+        next_m = tf.multiply(self.beta_1, m) + tf.multiply(1.0 - self.beta_1, grad)
+        next_v = tf.multiply(self.beta_2, v) + tf.multiply(1.0 - self.beta_2, tf.square(grad))
+
+        update = next_m / (tf.sqrt(next_v) + self.epsilon)
+
+        # Just adding the square of the weights to the loss function is *not*
+        # the correct way of using L2 regularization/weight decay with Adam,
+        # since that will interact with the m and v parameters in strange ways.
+        #
+        # Instead we want to decay the weights in a manner that doesn't interact
+        # with the m/v parameters. This is equivalent to adding the square
+        # of the weights to the loss with plain (non-momentum) SGD.
+        if self._do_use_weight_decay(param_name):
+            update += self.weight_decay_rate * var
+
+        update_with_lr = self.learning_rate * update
+
+        next_param = var - update_with_lr
+
+        return tf.group([var.assign(next_param), m.assign(next_m), v.assign(next_v)])
+
+    def _resource_apply_dense(self, grad, var):
+        """See `tf.train.Optimizer._resource_apply_dense()`."""
+        return self._apply_dense(grad, var)
+
+    def _apply_sparse(self, grad, var):
+        """See `tf.train.Optimizer._apply_sparse()`."""
+
+        def scatter_update_fn(x, i, v):
+            return tf.compat.v1.scatter_update(x, i, v, use_locking=self._use_locking)
+
+        return self._apply_sparse_shared(grad.values, grad.indices, var, scatter_update_fn)
+
+    def _resource_apply_sparse(self, grad, var, indices):
+        """See `tf.train.Optimizer._resource_apply_sparse()`."""
+
+        def scatter_update_fn(x, i, v):
+            with tf.control_dependencies([resource_variable_ops.resource_scatter_update(x.handle, i, v)]):
+                return x.value()
+
+        return self._apply_sparse_shared(grad, indices, var, scatter_update_fn)
+
+    def _apply_sparse_shared(self, grad, indices, var, scatter_update_fn):
+        """Applies sparse gradients to a variable.
+
+        Args:
+          grad: A tensor for the `values` of `tf.IndexedSlices`.
+          indices: A tensor for the `indices` of `tf.IndexedSlices`.
+          var: A `tf.Variable` object.
+          scatter_update_fn: A function which performs scattered update to
+            a `tf.Variable` object. It takes tuple of (x, i, v) where:
+              * x: A `tf.Variable` object which is updated by `i` and `v`,
+              * i: A tensor for the `indices` of `tf.IndexedSlices`,
+              * v: A tensor for the `values` of `tf.IndexedSlices`,
+            and returns a tensor after updating `x`.
+
+        Returns:
+          An op which updates `var` with `grad` and `indices`.
+        """
+        param_name = self._get_variable_name(var.name)
+        m = self.get_slot(var, "m")
+        v = self.get_slot(var, "v")
+
+        # m_t = beta1 * m + (1 - beta1) * g_t
+        m_scaled_g_values = tf.multiply(1.0 - self.beta_1, grad)
+        m_t = m.assign(m * self.beta_1)
+        with tf.control_dependencies([m_t]):
+            m_slice = tf.gather(m, indices) + m_scaled_g_values
+            m_t = scatter_update_fn(m, indices, m_slice)
+
+        # v_t = beta2 * v + (1 - beta2) * g_t^2
+        v_scaled_g_values = tf.multiply(1.0 - self.beta_2, tf.square(grad))
+        v_t = v.assign(v * self.beta_2)
+        with tf.control_dependencies([v_t]):
+            v_slice = tf.gather(v, indices) + v_scaled_g_values
+            v_t = scatter_update_fn(v, indices, v_slice)
+
+        update = m_t / (tf.sqrt(v_t) + self.epsilon)
+
+        # Just adding the square of the weights to the loss function is *not*
+        # the correct way of using L2 regularization/weight decay with Adam,
+        # since that will interact with the m and v parameters in strange ways.
+        #
+        # Instead we want to decay the weights in a manner that doesn't interact
+        # with the m/v parameters. This is equivalent to adding the square
+        # of the weights to the loss with plain (non-momentum) SGD.
+        if self._do_use_weight_decay(param_name):
+            update += self.weight_decay_rate * var
+
+        update_with_lr = self.learning_rate * update
+
+        next_param = var - update_with_lr
+
+        return tf.group([var.assign(next_param), m_t, v_t])
+
+    def _do_use_weight_decay(self, param_name):
+        """Whether to use L2 weight decay for `param_name`."""
+        if not self.weight_decay_rate:
+            return False
+        if self.exclude_from_weight_decay:
+            for r in self.exclude_from_weight_decay:
+                if re.search(r, param_name) is not None:
+                    return False
+        return True
+
+    def _get_variable_name(self, param_name):
+        """Get the variable name from the tensor name."""
+        m = re.match("^(.*):\\d+$", param_name)
+        if m is not None:
+            param_name = m.group(1)
+        return param_name
diff --git a/pretrain/kobigbird/recompute_grad.py b/pretrain/kobigbird/recompute_grad.py
new file mode 100644
index 0000000..6d0f6d8
--- /dev/null
+++ b/pretrain/kobigbird/recompute_grad.py
@@ -0,0 +1,526 @@
+# Copyright 2021 The BigBird Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Library for rematerialization.
+
+Incubates a version of tf.recompute_grad that is XLA compatible.
+""" +from typing import Deque, List, NamedTuple, Optional, Sequence, Text, Union + +import collections +import numbers +import os +import threading + +import numpy as np +import tensorflow.compat.v2 as tf +from absl import logging + +# pylint: disable=g-direct-tensorflow-import +from tensorflow.python.framework import ops +from tensorflow.python.ops import custom_gradient + + +# Remove when https://github.com/tensorflow/tensorflow/pull/45298 +# gets merged +def get_variable_by_name(var_name): + """Retrieves tf.Variable from name in MirroredStrategy (multi-gpu).""" + + # Get all variables, but it will have copies from different replicas + all_global_vars = ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES) + + def _replica_filter(var): + """Filter out variables from different context.""" + try: + return var_name == var.op.name + except AttributeError: + return False + + candidate_vars = list(filter(_replica_filter, all_global_vars)) + + if len(candidate_vars) >= 1: + # Filter out non-trainable variables. + candidate_vars = [v for v in candidate_vars if v.trainable] + else: + raise ValueError("Unsuccessful at finding variable {}.".format(var_name)) + + if len(candidate_vars) == 1: + return candidate_vars[0] + elif len(candidate_vars) > 1: + raise ValueError( + "Unsuccessful at finding trainable variable {}. " + "Number of candidates: {}. " + "Candidates: {}".format(var_name, len(candidate_vars), candidate_vars) + ) + else: + # The variable is not trainable. + return None + + +custom_gradient.get_variable_by_name = get_variable_by_name + + +class RecomputeContext( + NamedTuple( + "RecomputeContext", + [ + ("is_recomputing", bool), + ("seed", tf.Tensor), + ("children", Deque["RecomputeContext"]), + ], + ) +): + """Context for recomputation. + + Attributes: + is_recomputing: Whether we are in a recomputation phase. + seed: Scalar integer tensor that should be used with stateless random ops + for deterministic behavior and correct computation of the gradient. + children: Nested `RecomputeContext` instances. Used internally by + `recompute_grad` to track nested instances of `RecomputeContext`. + """ + + def __enter__(self): + return _context_stack.push(self) + + def __exit__(self, exc_type, exc_value, traceback): + _context_stack.pop(self) + + +# Simplified version of `_DefaultStack` in +# https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/framework/ops.py. +class _ContextStack(threading.local): + """A thread-local stack for providing implicit recompute contexts.""" + + def __init__(self): + super(_ContextStack, self).__init__() + self._stack = [] + + def top(self) -> Optional[RecomputeContext]: + return self._stack[-1] if self._stack else None + + def push(self, context: RecomputeContext): + self._stack.append(context) + return context + + def pop(self, context: RecomputeContext): + if self._stack[-1] is not context: + raise AssertionError("Nesting violated for RecomputeContext.") + self._stack.pop() + + +_context_stack = _ContextStack() + + +def get_recompute_context() -> Optional[RecomputeContext]: + """Returns the current recomputing context if it exists.""" + return _context_stack.top() + + +# Adapted from +# https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/ops/control_flow_util.py. 
+def _get_containing_xla_context(graph: tf.Graph) -> Optional[object]:
+    """Returns the first ancestor `XLAControlFlowContext` in the `graph`."""
+    ctxt = graph._get_control_flow_context()  # pylint: disable=protected-access
+    while ctxt:
+        if ctxt.IsXLAContext():
+            return ctxt
+        ctxt = ctxt.outer_context
+    return None
+
+
+def _in_xla_context(graph: Optional[tf.Graph] = None) -> bool:
+    """Detects whether we are in an XLA context."""
+    if "--tf_xla_auto_jit=2" in os.environ.get("TF_XLA_FLAGS", ""):
+        return True
+    graph = tf.compat.v1.get_default_graph() if graph is None else graph
+    while True:
+        if _get_containing_xla_context(graph) is not None:
+            return True
+        try:
+            graph = graph.outer_graph
+        except AttributeError:
+            return False
+
+
+def _force_data_dependency(first_compute: Sequence[tf.Tensor], then_compute: Sequence[tf.Tensor]) -> List[tf.Tensor]:
+    """Force all of `then_compute` to depend on all of `first_compute`.
+
+    Uses a dummy data dependency, which is useful when running on TPUs because
+    XLA ignores control dependencies. Only supports float arguments.
+
+    Args:
+      first_compute: Sequence of `Tensor`s to be executed before `then_compute`.
+      then_compute: Sequence of `Tensor`s to be executed after `first_compute`.
+
+    Returns:
+      Sequence of `Tensor`s with the same length as `then_compute`.
+
+    Raises:
+      ValueError: if ranks are unknown or types are not floating.
+    """
+
+    def _first_element(x):
+        if x.shape.ndims is None:
+            raise ValueError("Rank of Tensor %s must be known" % x)
+        ndims = x.shape.ndims
+        begin = tf.zeros(ndims, dtype=tf.int32)
+        size = tf.ones(ndims, dtype=tf.int32)
+        return tf.reshape(tf.slice(x, begin, size), [])
+
+    first_compute_sum = tf.add_n([_first_element(x) for x in first_compute if x is not None])
+    dtype = first_compute_sum.dtype
+    if not dtype.is_floating:
+        raise ValueError("_force_data_dependency only supports floating dtypes.")
+    zero = np.finfo(dtype.as_numpy_dtype).tiny * first_compute_sum
+    return [x + tf.cast(zero, x.dtype) if x is not None else None for x in then_compute]
+
+
+def _make_seed_if_none(seed: Optional[tf.Tensor]) -> tf.Tensor:
+    """Uses the global generator to make a seed if necessary."""
+    if seed is not None:
+        return seed
+    generator = tf.random.experimental.get_global_generator()
+    # The two seeds for stateless random ops don't have individual semantics and
+    # are scrambled together, so providing one seed is fine. This makes it easier
+    # for users to provide a local seed without worrying about integer overflow.
+    # See `make_seeds` in
+    # https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/ops/stateful_random_ops.py.
+    try:
+        return generator.uniform_full_int([], tf.int32, name="recompute_grad_seed")
+    except (RuntimeError, TypeError, ValueError, tf.errors.NotFoundError) as e:
+        # For a number of reasons, the above operation can fail, e.g. when using
+        # multiple graphs or toggling between eager and graph modes. Reset the
+        # generator and retry.
+        logging.warning("Resetting the generator. %s: %s", type(e), e)
+        tf.random.experimental.set_global_generator(None)
+        generator = tf.random.experimental.get_global_generator()
+        return generator.uniform_full_int([], tf.int32, name="recompute_grad_seed")
+
+
+def recompute_grad(f, seed=None):
+    """An eager-compatible version of recompute_grad.
+
+    For f(*args, **kwargs), this supports gradients with respect to args, or
+    with respect to any variables residing in the kwarg 'variables'.
+    Note that for keras layer and model objects, this is handled automatically.
+
+    Warning: If `f` was originally a tf.keras Model or Layer object, `g` will not
+    be able to access the member variables of that object, because `g` returns
+    through the wrapper function `inner`. When recomputing gradients through
+    objects that inherit from keras, we suggest keeping a reference to the
+    underlying object around for the purpose of accessing these variables.
+
+    Args:
+      f: function `f(*x)` that returns a `Tensor` or sequence of `Tensor` outputs.
+      seed: Optional seed for random ops. `seed` should be an integer scalar
+        `Tensor`. When compiling to XLA, `seed` must have dtype `tf.int32`. If
+        `seed` is not provided, one will be generated.
+
+    Returns:
+      A function `g` that wraps `f`, but which recomputes `f` on the backwards
+      pass of a gradient call.
+    """
+
+    @tf.custom_gradient
+    def inner(*args, **kwargs):
+        """Inner function closure for calculating gradients."""
+        # Detect when we're nested and in the backwards pass, so we don't generate
+        # an additional seed.
+        parent_context = get_recompute_context()
+        if parent_context is not None and parent_context.is_recomputing:
+            # Use the cached context in the recomputation phase.
+            with parent_context.children.popleft()._replace(is_recomputing=True) as context:
+                result = f(*args, **kwargs)
+        else:
+            with RecomputeContext(
+                is_recomputing=False,
+                seed=_make_seed_if_none(seed),
+                children=collections.deque(),
+            ) as context:
+                result = f(*args, **kwargs)
+            # In the forward pass, build up a tree of recomputation contexts.
+            if parent_context is not None and not parent_context.is_recomputing:
+                parent_context.children.append(context)
+
+        def grad(*dresult, **grad_kwargs):
+            """Gradient function calculation for inner function."""
+            variables = grad_kwargs.pop("variables", None)
+            if grad_kwargs:
+                raise ValueError("Found unexpected kwargs for `grad`: ", list(grad_kwargs.keys()))
+            inputs, seed = list(args), context.seed
+            if _in_xla_context():
+                inputs = _force_data_dependency(tf.nest.flatten(dresult), inputs + [seed])
+                seed = inputs.pop()
+            # tf.keras.backend.set_learning_phase(1)
+            with tf.GradientTape() as tape:
+                tape.watch(inputs)
+                if variables is not None:
+                    tape.watch(variables)
+                with tf.control_dependencies(dresult):
+                    with context._replace(is_recomputing=True, seed=seed):
+                        result = f(*inputs, **kwargs)
+            kw_vars = []
+            if variables is not None:
+                kw_vars = list(variables)
+            grads = tape.gradient(result, list(inputs) + kw_vars, output_gradients=dresult)
+            return grads[: len(inputs)], grads[len(inputs) :]
+
+        return result, grad
+
+    return inner
+
+
+######################## STATELESS DROPOUT LAYERS ##############################
+
+
+def _as_shape(shape: Union[Sequence[int], tf.TensorShape]) -> tf.TensorShape:
+    """Converts the given object to a TensorShape."""
+    return shape if isinstance(shape, tf.TensorShape) else tf.TensorShape(shape)
+
+
+def _get_noise_shape(
+    x: tf.Tensor, noise_shape: Union[Sequence[int], tf.TensorShape]
+) -> Union[tf.Tensor, tf.TensorShape, Sequence[int]]:
+    """Computes the shape of the binary mask for dropout."""
+    # If noise_shape is None, return immediately.
+    if noise_shape is None:
+        return tf.shape(x)
+
+    try:
+        # Best effort to figure out the intended shape.
+        # If not possible, let the op handle it.
+        # In eager mode an exception will show up.
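+        # For example (illustration): with x of shape
+        # (batch, timesteps, features) and noise_shape=(batch, 1, features),
+        # the broadcast mask drops the same features at every timestep.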
+        noise_shape_ = _as_shape(noise_shape)
+    except (TypeError, ValueError):
+        return noise_shape
+
+    if x.shape.dims is not None and len(x.shape.dims) == len(noise_shape_.dims):
+        new_dims = []
+        for i, dim in enumerate(x.shape.dims):
+            if noise_shape_.dims[i].value is None and dim.value is not None:
+                new_dims.append(dim.value)
+            else:
+                new_dims.append(noise_shape_.dims[i].value)
+        return tf.TensorShape(new_dims)
+
+    return noise_shape
+
+
+def stateless_dropout(
+    x: tf.Tensor,
+    rate: float,
+    seed: tf.Tensor,
+    noise_shape: Optional[Union[Sequence[int], tf.TensorShape]] = None,
+    name: Optional[Text] = None,
+) -> tf.Tensor:
+    """Computes dropout: randomly sets elements to zero to prevent overfitting.
+
+    See https://www.tensorflow.org/api_docs/python/tf/nn/dropout.
+    This version differs in that the seed is required if the rate is nonzero.
+
+    Args:
+      x: A floating point tensor.
+      rate: A scalar `Tensor` with the same type as x. The probability that each
+        element is dropped. For example, setting rate=0.1 would drop 10% of input
+        elements.
+      seed: A shape [2] integer Tensor of seeds to the random number generator.
+        Must have dtype `tf.int32` when compiling to XLA.
+      noise_shape: A 1-D `Tensor` of type `int32`, representing the shape for
+        randomly generated keep/drop flags.
+      name: A name for this operation (optional).
+
+    Returns:
+      A `Tensor` of the same shape as `x`.
+
+    Raises:
+      ValueError: If `rate` is not in `[0, 1)` or if `x` is not a floating point
+        tensor. `rate=1` is disallowed, because the output would be all zeros,
+        which is likely not what was intended.
+    """
+    with tf.name_scope(name or "stateless_dropout") as name:
+        x = tf.convert_to_tensor(x, name="x")
+        if not x.dtype.is_floating:
+            raise ValueError(
+                "x has to be a floating point tensor since it's going "
+                "to be scaled. Got a %s tensor instead." % x.dtype
+            )
+        if isinstance(rate, numbers.Real):
+            if not (rate >= 0 and rate < 1):
+                raise ValueError("rate must be a scalar tensor or a float in the " "range [0, 1), got %g" % rate)
+            if rate > 0.5:
+                logging.log_first_n(
+                    logging.WARN,
+                    "Large dropout rate: %g (>0.5). In TensorFlow "
+                    "2.x, dropout() uses dropout rate instead of keep_prob. "
+                    "Please ensure that this is intended.",
+                    5,
+                    rate,
+                )
+
+        # Early return if nothing needs to be dropped.
+        if tf.get_static_value(rate) == 0:
+            return x
+
+        rate = tf.convert_to_tensor(rate, dtype=x.dtype, name="rate")
+        rate.shape.assert_has_rank(0)
+        noise_shape = _get_noise_shape(x, noise_shape)
+        # Sample a uniform distribution on [0.0, 1.0) and select values larger
+        # than rate.
+        #
+        # NOTE: Random uniform can actually only generate 2^23 floats on
+        # [1.0, 2.0) and subtract 1.0.
+        random_tensor = tf.random.stateless_uniform(noise_shape, seed=seed, dtype=x.dtype)
+        keep_prob = 1 - rate
+        scale = 1 / keep_prob
+        # NOTE: if (1.0 + rate) - 1 is equal to rate, then we want to consider that
+        # float to be selected, hence we use a >= comparison.
+        keep_mask = random_tensor >= rate
+        ret = x * scale * tf.cast(keep_mask, x.dtype)
+        if not tf.executing_eagerly():
+            ret.set_shape(x.get_shape())
+        return ret
+
+
+# Reimplements internal function
+# https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/framework/smart_cond.py.
+def smart_cond(pred, true_fn=None, false_fn=None, name=None):
+    """Return either `true_fn()` if predicate `pred` is true else `false_fn()`.
+
+    If `pred` is a bool or has a constant value, we return either `true_fn()`
+    or `false_fn()`, otherwise we use `tf.cond` to dynamically route to both.
+
+    Args:
+      pred: A scalar determining whether to return the result of `true_fn` or
+        `false_fn`.
+      true_fn: The callable to be performed if pred is true.
+      false_fn: The callable to be performed if pred is false.
+      name: Optional name prefix when using `tf.cond`.
+
+    Returns:
+      Tensors returned by the call to either `true_fn` or `false_fn`.
+
+    Raises:
+      TypeError: If `true_fn` or `false_fn` is not callable.
+    """
+    if not callable(true_fn):
+        raise TypeError("`true_fn` must be callable.")
+    if not callable(false_fn):
+        raise TypeError("`false_fn` must be callable.")
+    pred_value = tf.get_static_value(pred)
+    if isinstance(pred, tf.Variable) or pred_value is None:
+        return tf.cond(pred, true_fn=true_fn, false_fn=false_fn, name=name)
+    if pred_value:
+        return true_fn()
+    else:
+        return false_fn()
+
+
+# See https://www.tensorflow.org/api_docs/python/tf/keras/layers/Dropout.
+class RecomputingDropout(tf.keras.layers.Layer):
+    """`tf.keras.layers.Dropout` that supports `recompute_grad`."""
+
+    def __init__(self, rate, noise_shape=None, seed=None, force_recomputation=False, **kwargs):
+        """Initializes `RecomputingDropout`.
+
+        Args:
+          rate: Float between 0 and 1. Fraction of the input units to drop.
+          noise_shape: 1D integer tensor representing the shape of the binary
+            dropout mask that will be multiplied with the input. For instance, if
+            inputs have shape `(batch_size, timesteps, features)` and you want the
+            dropout mask to be the same for all timesteps, you can use
+            `noise_shape=(batch_size, 1, features)`.
+          seed: A Python integer to use as random seed.
+          force_recomputation: If `True`, then raises an error if called outside a
+            recompute context.
+          **kwargs: Keyword arguments for `tf.keras.layers.Layer`.
+        """
+
+        super(RecomputingDropout, self).__init__(**kwargs)
+        self.rate = rate
+        self.noise_shape = noise_shape
+        self.seed = seed
+        self.force_recomputation = force_recomputation
+        self.supports_masking = True
+        # Create a layer-specific seed to combine with the global recompute seed.
+        self._recompute_seed = np.random.randint(-(2 ** 31), 2 ** 31, dtype=np.int32) if seed is None else seed
+
+    def _get_noise_shape(self, inputs):
+        # Subclasses of `Dropout` may implement `_get_noise_shape(self, inputs)`,
+        # which will override `self.noise_shape`, and allows for custom noise
+        # shapes with dynamically sized inputs.
+        if self.noise_shape is None:
+            return None
+
+        concrete_inputs_shape = tf.shape(inputs)
+        noise_shape = []
+        for i, value in enumerate(self.noise_shape):
+            noise_shape.append(concrete_inputs_shape[i] if value is None else value)
+        return tf.convert_to_tensor(noise_shape)
+
+    def call(self, inputs, training=None):
+        """Builds computation graph.
+
+        Args:
+          inputs: Input tensor (of any rank).
+          training: Python boolean indicating whether the layer should behave in
+            training mode (adding dropout) or in inference mode (doing nothing).
+
+        Returns:
+          `inputs` masked according to layer configuration.
+
+        Raises:
+          ValueError: If `force_recomputation` is `True` and called outside
+            a recompute context.
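+
+        Example (an illustrative sketch, not from the original docstring; uses
+        `recompute_grad` from this module):
+
+          dropout = RecomputingDropout(rate=0.1)
+          block = recompute_grad(lambda x: dropout(x, training=True))
+          y = block(tf.ones([8, 128]))  # the same dropout mask is replayed
+                                        # on the backward pass.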
+ """ + if self.rate == 0: + return inputs + + if training is None: + training = tf.keras.backend.learning_phase() + + def dropped_inputs(): + """Randomly drops elements of `inputs` when `training=True`.""" + recompute_context = get_recompute_context() + if recompute_context is None: + if self.force_recomputation: + raise ValueError("RecomputeContext is required when force_recomputation=True.") + return tf.nn.dropout( + inputs, + noise_shape=self._get_noise_shape(inputs), + seed=self.seed, + rate=self.rate, + ) + seed = tf.stack([recompute_context.seed, self._recompute_seed]) + return stateless_dropout( + inputs, + rate=self.rate, + seed=seed, + noise_shape=self._get_noise_shape(inputs), + ) + + output = smart_cond(training, dropped_inputs, lambda: tf.identity(inputs)) + return output + + def compute_output_shape(self, input_shape): + return input_shape + + def get_config(self): + config = { + "rate": self.rate, + "noise_shape": self.noise_shape, + "seed": self.seed, + "force_recomputation": self.force_recomputation, + } + base_config = super(RecomputingDropout, self).get_config() + return dict(list(base_config.items()) + list(config.items())) diff --git a/pretrain/kobigbird/training_utils.py b/pretrain/kobigbird/training_utils.py new file mode 100644 index 0000000..964969a --- /dev/null +++ b/pretrain/kobigbird/training_utils.py @@ -0,0 +1,90 @@ +# coding=utf-8 +# Copyright 2020 The Google Research Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Utilities for training the models.""" + +from __future__ import absolute_import, division, print_function + +import datetime +import re +import sys +import time + +import tensorflow.compat.v1 as tf + + +def sys_log(*args): + msg = " ".join(map(str, args)) + sys.stdout.write(msg + "\n") + sys.stdout.flush() + + +class ETAHook(tf.estimator.SessionRunHook): + """Print out the time remaining during training/evaluation.""" + + def __init__(self, to_log, n_steps, iterations_per_loop, on_tpu, log_every=1, is_training=True): + self._to_log = to_log + self._n_steps = n_steps + self._iterations_per_loop = iterations_per_loop + self._on_tpu = on_tpu + self._log_every = log_every + self._is_training = is_training + self._steps_run_so_far = 0 + self._global_step = None + self._global_step_tensor = None + self._start_step = None + self._start_time = None + + def begin(self): + self._global_step_tensor = tf.train.get_or_create_global_step() + + def before_run(self, run_context): + if self._start_time is None: + self._start_time = time.time() + return tf.estimator.SessionRunArgs(self._to_log) + + def after_run(self, run_context, run_values): + self._global_step = run_context.session.run(self._global_step_tensor) + self._steps_run_so_far += self._iterations_per_loop if self._on_tpu else 1 + if self._start_step is None: + self._start_step = self._global_step - (self._iterations_per_loop if self._on_tpu else 1) + self.log(run_values) + + def end(self, session): + self._global_step = session.run(self._global_step_tensor) + self.log() + + def log(self, run_values=None): + step = self._global_step if self._is_training else self._steps_run_so_far + if step % self._log_every != 0: + return + msg = "{:}/{:} = {:.1f}%".format(step, self._n_steps, 100.0 * step / self._n_steps) + time_elapsed = time.time() - self._start_time + time_per_step = time_elapsed / ((step - self._start_step) if self._is_training else step) + msg += ", SPS: {:.1f}".format(1 / time_per_step) + msg += ", ELAP: " + secs_to_str(time_elapsed) + msg += ", ETA: " + secs_to_str((self._n_steps - step) * time_per_step) + if run_values is not None: + for tag, value in run_values.results.items(): + msg += " - " + str(tag) + (": {:.4f}".format(value)) + sys_log(msg) + + +def secs_to_str(secs): + s = str(datetime.timedelta(seconds=int(round(secs)))) + s = re.sub("^0:", "", s) + s = re.sub("^0", "", s) + s = re.sub("^0:", "", s) + s = re.sub("^0", "", s) + return s diff --git a/pretrain/kobigbird/utils.py b/pretrain/kobigbird/utils.py new file mode 100644 index 0000000..9a9f58a --- /dev/null +++ b/pretrain/kobigbird/utils.py @@ -0,0 +1,786 @@ +# Copyright 2021 The BigBird Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Helper and utility functions.""" + +import re + +import numpy as np +import tensorflow.compat.v2 as tf +from absl import logging + +############################### SHAPE UTILS #################################### + + +def get_shape_list(tensor, expected_rank=None, name=None): + """Returns a list of the shape of tensor, preferring static dimensions. + + Args: + tensor: A tf.Tensor object to find the shape of. + expected_rank: (optional) int. The expected rank of `tensor`. If this is + specified and the `tensor` has a different rank, and exception will be + thrown. + name: Optional name of the tensor for the error message. + + Returns: + A list of dimensions of the shape of tensor. All static dimensions will + be returned as python integers, and dynamic dimensions will be returned + as tf.Tensor scalars. + """ + if not tf.executing_eagerly() and name is None: + name = tensor.name + + if expected_rank is not None: + assert_rank(tensor, expected_rank, name) + + shape = tensor.shape.as_list() + + non_static_indexes = [] + for (index, dim) in enumerate(shape): + if dim is None: + non_static_indexes.append(index) + + if not non_static_indexes: + return shape + + # assert False, "Static shape not available for {}".format(tensor) + + dyn_shape = tf.shape(tensor) + for index in non_static_indexes: + shape[index] = dyn_shape[index] + return shape + + +def reshape_to_matrix(input_tensor): + """Reshapes a >= rank 2 tensor to a rank 2 tensor (i.e., a matrix).""" + ndims = input_tensor.shape.ndims + if ndims < 2: + raise ValueError("Input tensor must have at least rank 2. Shape = %s" % (input_tensor.shape)) + if ndims == 2: + return input_tensor + + width = input_tensor.shape[-1] + output_tensor = tf.reshape(input_tensor, [-1, width]) + return output_tensor + + +def reshape_from_matrix(output_tensor, orig_shape_list): + """Reshapes a rank 2 tensor back to its original rank >= 2 tensor.""" + if len(orig_shape_list) == 2: + return output_tensor + + output_shape = get_shape_list(output_tensor) + + orig_dims = orig_shape_list[0:-1] + width = output_shape[-1] + + return tf.reshape(output_tensor, orig_dims + [width]) + + +def assert_rank(tensor, expected_rank, name=None): + """Raises an exception if the tensor rank is not of the expected rank. + + Args: + tensor: A tf.Tensor to check the rank of. + expected_rank: Python integer or list of integers, expected rank. + name: Optional name of the tensor for the error message. + + Raises: + ValueError: If the expected shape doesn't match the actual shape. 
+ """ + if not tf.executing_eagerly() and name is None: + name = tensor.name + + expected_rank_dict = {} + if isinstance(expected_rank, int): + expected_rank_dict[expected_rank] = True + else: + for x in expected_rank: + expected_rank_dict[x] = True + + actual_rank = tensor.shape.ndims + if actual_rank not in expected_rank_dict: + scope_name = tf.compat.v1.get_variable_scope().name + raise ValueError( + "For the tensor `{}` in scope `{}`, the actual rank " + "`{}` (shape = {}) is not equal to the expected rank `{}`".format( + name, scope_name, actual_rank, str(tensor.shape), str(expected_rank) + ) + ) + + +############################### DENSE LAYERS ################################### + + +def create_initializer(initializer_range=0.02): + """Creates a `truncated_normal_initializer` with the given range.""" + return tf.compat.v1.truncated_normal_initializer(stddev=initializer_range) + + +class Dense3dLayer(tf.keras.layers.Layer): + """A dense layer with 3D kernel.""" + + def __init__( + self, + num_attention_heads, + size_per_head, + initializer, + activation, + name=None, + head_first=False, + use_bias=True, + ): + """Constructor for dense layer with 3D kernel. + + Args: + num_attention_heads: The size of output dimension. + size_per_head: The size per attention head. + initializer: Kernel initializer. + activation: Actication function. + name: The name scope of this layer. + head_first: Whether to output head dimension before or after sequence dim. + use_bias: Whether the layer uses a bias vector. + """ + super(Dense3dLayer, self).__init__(name=name) + self.num_attention_heads = num_attention_heads + self.size_per_head = size_per_head + self.initializer = initializer + self.activation = activation + self.head_first = head_first + self.use_bias = use_bias + + with tf.compat.v1.variable_scope(name): + hidden_size = self.num_attention_heads * self.size_per_head + self.w = tf.compat.v1.get_variable( + name="kernel", + shape=[hidden_size, hidden_size], + initializer=self.initializer, + ) + + if self.use_bias: + self.b = tf.compat.v1.get_variable(name="bias", shape=[hidden_size], initializer=tf.zeros_initializer()) + else: + self.b = None + + def call(self, input_tensor): + """Constructor for dense layer with 3D kernel. + + Args: + input_tensor: float Tensor of shape [batch, seq_length, hidden_size]. + + Returns: + float logits Tensor. + """ + hidden_size = self.num_attention_heads * self.size_per_head + reshape_w = tf.reshape(self.w, [hidden_size, self.num_attention_heads, self.size_per_head]) + if self.head_first: + ret = tf.einsum("abc,cde->adbe", input_tensor, reshape_w) + else: + ret = tf.einsum("abc,cde->abde", input_tensor, reshape_w) + + if self.use_bias: + if self.head_first: + reshape_b = tf.reshape(self.b, [1, self.num_attention_heads, 1, self.size_per_head]) + else: + reshape_b = tf.reshape(self.b, [self.num_attention_heads, self.size_per_head]) + ret += reshape_b + + if self.activation is not None: + return self.activation(ret) + else: + return ret + + +class Dense3dProjLayer(tf.keras.layers.Layer): + """A dense layer with 3D kernel for projection.""" + + def __init__( + self, + num_attention_heads, + size_per_head, + initializer, + activation, + name=None, + use_bias=True, + ): + """Constructor for dense layer with 3D kernel for projection. + + Args: + num_attention_heads: The size of output dimension. + size_per_head: The size per attention head. + initializer: Kernel initializer. + activation: Actication function. + name: The name scope of this layer. 
+          use_bias: Whether the layer uses a bias vector.
+        """
+        super(Dense3dProjLayer, self).__init__(name=name)
+        self.num_attention_heads = num_attention_heads
+        self.size_per_head = size_per_head
+        self.initializer = initializer
+        self.activation = activation
+        self.use_bias = use_bias
+
+        with tf.compat.v1.variable_scope(name):
+            hidden_size = self.num_attention_heads * self.size_per_head
+            self.w = tf.compat.v1.get_variable(
+                name="kernel",
+                shape=[hidden_size, hidden_size],
+                initializer=self.initializer,
+            )
+
+            if self.use_bias:
+                self.b = tf.compat.v1.get_variable(name="bias", shape=[hidden_size], initializer=tf.zeros_initializer())
+            else:
+                self.b = None
+
+    def call(self, input_tensor):
+        """Forward pass for dense layer with 3D kernel for projection.
+
+        Args:
+          input_tensor: float Tensor of shape [batch, from_seq_length,
+            num_attention_heads, size_per_head].
+
+        Returns:
+          float logits Tensor.
+        """
+        hidden_size = self.num_attention_heads * self.size_per_head
+        reshape_w = tf.reshape(self.w, [self.num_attention_heads, self.size_per_head, hidden_size])
+        ret = tf.einsum("BFNH,NHD->BFD", input_tensor, reshape_w)
+
+        if self.use_bias:
+            ret += self.b
+
+        if self.activation is not None:
+            return self.activation(ret)
+        else:
+            return ret
+
+
+class Dense2dLayer(tf.keras.layers.Layer):
+    """A dense layer with 2D kernel."""
+
+    def __init__(self, input_size, output_size, initializer, activation, name=None, use_bias=True):
+        """Constructor for dense layer with 2D kernel.
+
+        Args:
+          input_size: The size of input dimension.
+          output_size: The size of output dimension.
+          initializer: Kernel initializer.
+          activation: Activation function.
+          name: The name scope of this layer.
+          use_bias: Whether the layer uses a bias vector.
+        """
+        super(Dense2dLayer, self).__init__(name=name)
+        self.input_size = input_size
+        self.output_size = output_size
+        self.initializer = initializer
+        self.activation = activation
+        self.use_bias = use_bias
+
+        with tf.compat.v1.variable_scope(name):
+            self.w = tf.compat.v1.get_variable(
+                name="kernel",
+                shape=[self.input_size, self.output_size],
+                initializer=self.initializer,
+            )
+
+            if self.use_bias:
+                self.b = tf.compat.v1.get_variable(
+                    name="bias",
+                    shape=[self.output_size],
+                    initializer=tf.zeros_initializer(),
+                )
+            else:
+                self.b = None
+
+    def call(self, input_tensor):
+        """Forward pass for dense layer with 2D kernel.
+
+        Args:
+          input_tensor: Float tensor with rank 3.
+
+        Returns:
+          float logits Tensor.
+        """
+        ret = tf.einsum("abc,cd->abd", input_tensor, self.w)
+
+        if self.use_bias:
+            ret += self.b
+
+        if self.activation is not None:
+            return self.activation(ret)
+        else:
+            return ret
+
+
+class SimpleDenseLayer(tf.keras.layers.Layer):
+    """A simple dense layer with 2D kernel."""
+
+    def __init__(self, input_size, output_size, initializer, activation, name=None, use_bias=True):
+        """Constructor for dense layer with 2D kernel.
+
+        Args:
+          input_size: The size of input dimension.
+          output_size: The size of output dimension.
+          initializer: Kernel initializer.
+          activation: Activation function.
+          name: The name scope of this layer.
+          use_bias: Whether the layer uses a bias vector.
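+
+        Example (illustrative): `SimpleDenseLayer(input_size=768, output_size=2,
+          initializer=create_initializer(), activation=None)` maps a rank-2 input
+          of shape [batch, 768] to [batch, 2] via `tf.einsum("ab,bc->ac", ...)`.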
+ """ + super(SimpleDenseLayer, self).__init__(name=name) + self.input_size = input_size + self.output_size = output_size + self.initializer = initializer + self.activation = activation + self.use_bias = use_bias + + with tf.compat.v1.variable_scope(name): + self.w = tf.compat.v1.get_variable( + name="kernel", + shape=[self.input_size, self.output_size], + initializer=self.initializer, + ) + + if self.use_bias: + self.b = tf.compat.v1.get_variable( + name="bias", + shape=[self.output_size], + initializer=tf.zeros_initializer(), + ) + else: + self.b = None + + def call(self, input_tensor): + """Forward pass for dense layer with 2D kernel. + + Args: + input_tensor: Float tensor with rank 2. + + Returns: + float logits Tensor. + """ + ret = tf.einsum("ab,bc->ac", input_tensor, self.w) + + if self.use_bias: + ret += self.b + + if self.activation is not None: + return self.activation(ret) + else: + return ret + + +def gelu(x): + """Gaussian Error Linear Unit. + + This is a smoother version of the RELU. + Original paper: https://arxiv.org/abs/1606.08415 + Args: + x: float Tensor to perform activation. + + Returns: + `x` with the GELU activation applied. + """ + cdf = 0.5 * (1.0 + tf.tanh((np.sqrt(2 / np.pi) * (x + 0.044715 * tf.pow(x, 3))))) + return x * cdf + + +def get_activation(activation_string): + """Maps a string to a Python function, e.g., "relu" => `tf.nn.relu`. + + Args: + activation_string: String name of the activation function. + + Returns: + A Python function corresponding to the activation function. If + `activation_string` is None, empty, or "linear", this will return None. + If `activation_string` is not a string, it will return `activation_string`. + + Raises: + ValueError: The `activation_string` does not correspond to a known + activation. + """ + + # We assume that anything that"s not a string is already an activation + # function, so we just return it. + if not isinstance(activation_string, str): + return activation_string + + if not activation_string: + return None + + act = activation_string.lower() + if act == "linear": + return None + elif act == "relu": + return tf.nn.relu + elif act == "gelu": + return gelu + elif act == "tanh": + return tf.tanh + else: + raise ValueError("Unsupported activation: %s" % act) + + +############################## NORM LAYERS ##################################### + + +class NormLayer(tf.keras.layers.Layer): + """Replacement for contrib_layers.layer_norm.""" + + def __init__(self, hdim, dtype=tf.float32, name="LayerNorm"): + super(NormLayer, self).__init__(name=name) + self._dtype = dtype + + with tf.compat.v1.variable_scope(name): + self.beta = tf.compat.v1.get_variable("beta", [hdim], dtype=dtype, initializer=tf.zeros_initializer()) + self.gamma = tf.compat.v1.get_variable("gamma", [hdim], dtype=dtype, initializer=tf.ones_initializer()) + + def call(self, inputs): + inputs_shape = inputs.shape + + # Compute norm along last axis + mean, variance = tf.nn.moments(inputs, [-1], keepdims=True) + # Compute layer normalization using the batch_normalization function. + # Note that epsilon must be increased for float16 due to the limited + # representable range. 
+ variance_epsilon = 1e-12 if self._dtype != tf.float16 else 1e-3 + outputs = tf.nn.batch_normalization( + inputs, + mean, + variance, + offset=self.beta, + scale=self.gamma, + variance_epsilon=variance_epsilon, + ) + outputs.set_shape(inputs_shape) + return outputs + + +############################# EMBEDDING LAYER ################################## + + +class EmbeddingLayer(tf.keras.layers.Layer): + """An embedding layer.""" + + def __init__( + self, + vocab_size, + emb_dim, + initializer, + scale_emb=False, + use_token_type=True, + num_token_types=2, + use_position_embeddings=True, + max_position_embeddings=4096, + dropout_prob=0.0, + name="embeddings", + ): + super(EmbeddingLayer, self).__init__(name=name) + self.vocab_size = vocab_size + self.emb_dim = emb_dim + self.scale_emb = scale_emb + self.num_token_types = num_token_types + self.max_position_embeddings = max_position_embeddings + self.dropout_prob = dropout_prob + + with tf.compat.v1.variable_scope(name): + self.word_embeddings = tf.compat.v1.get_variable( + "word_embeddings", + [vocab_size, emb_dim], + dtype=tf.float32, + initializer=initializer, + ) + + if use_token_type: + self.token_type_table = tf.compat.v1.get_variable( + "token_type_embeddings", + [num_token_types, emb_dim], + dtype=tf.float32, + initializer=initializer, + ) + else: + self.token_type_table = None + + if use_position_embeddings: + self.position_embeddings = tf.compat.v1.get_variable( + "position_embeddings", + [max_position_embeddings, emb_dim], + dtype=tf.float32, + initializer=initializer, + ) + else: + self.position_embeddings = None + + def call(self, input_ids, seq_length, start_pos=0, token_type_ids=None, training=None): + if input_ids is None: + return None + + # subtoken embedding + output = tf.nn.embedding_lookup(params=self.word_embeddings, ids=input_ids) + + if self.scale_emb: + output = output * self.emb_dim ** 0.5 + + if self.token_type_table is not None: + # This vocab will be small so we always do one-hot here, since it is + # always faster for a small vocabulary. + one_hot_ids = tf.one_hot(token_type_ids, depth=self.num_token_types) + token_type_embeddings = tf.tensordot(one_hot_ids, self.token_type_table, 1) + output += token_type_embeddings + + if self.position_embeddings is not None: + # assert_op = tf.compat.v1.assert_less_equal( + # start_pos + seq_length, self.max_position_embeddings) + # with tf.control_dependencies([assert_op]): + # So `position_embeddings` is effectively an embedding table for + # position [0, 1, 2, ..., max_position_embeddings-1], and the current + # sequence has positions [0, 1, 2, ... seq_length-1], so we can just + # perform a slice. + position_embeddings = tf.slice(self.position_embeddings, [start_pos, 0], [seq_length, self.emb_dim]) + output += tf.expand_dims(position_embeddings, axis=0) + + if training and self.dropout_prob > 0: + output = tf.nn.dropout(output, self.dropout_prob) + + return output + + def linear(self, x): + """Computes logits by running x through a linear layer. + + Args: + x: A float32 tensor with shape [..., hidden_size] + Returns: + float32 tensor with shape [..., vocab_size]. 
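+
+        Example (illustrative shapes): with emb_dim=768 and vocab_size=32500,
+        an input of shape [batch, seq, 768] yields logits of shape
+        [batch, seq, 32500]; `word_embeddings` doubles as the output projection
+        (weight tying).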
+ """ + with tf.compat.v1.name_scope("presoftmax_linear"): + logits = tf.tensordot(x, self.word_embeddings, [[-1], [1]]) + return logits + + +########################## TPU/CHECKPOINT UTILS ################################ + + +def get_estimator(config, model_fn): + """Create TPUEstimator object for given config and model_fn.""" + tpu_cluster_resolver = None + if config["use_tpu"] and config["tpu_name"]: + tpu_cluster_resolver = tf.distribute.cluster_resolver.TPUClusterResolver( + config["tpu_name"], zone=config["tpu_zone"], project=config["gcp_project"] + ) + + # Batch size book-keeping + # Estimators handle batch sizes differently among GPUs and TPUs + # GPU: Estimator needs per core batch size + # TPU: Estimator needs total batch size, i.e. num_cores * per core batch size + config_train_batch_size = config["train_batch_size"] # For estimator + config_eval_batch_size = config["eval_batch_size"] # For estimator + effective_train_batch_size = config["train_batch_size"] # For human + effective_eval_batch_size = config["eval_batch_size"] # For human + session_config = None + if config["use_tpu"]: + sliced_eval_mode = tf.compat.v1.estimator.tpu.InputPipelineConfig.SLICED + distribute_strategy = None + config_train_batch_size *= config["num_tpu_cores"] + config_eval_batch_size *= config["num_tpu_cores"] + effective_train_batch_size = config_train_batch_size + effective_eval_batch_size = config_eval_batch_size + else: + session_config = tf.compat.v1.ConfigProto( + allow_soft_placement=True, + gpu_options=tf.compat.v1.GPUOptions(per_process_gpu_memory_fraction=1.2), + ) + cluster_resolver = tf.distribute.cluster_resolver.TFConfigClusterResolver() + with tf.compat.v1.Session(cluster_resolver.master(), config=session_config) as sess: + logging.info(sess.list_devices()) + sliced_eval_mode = tf.compat.v1.estimator.tpu.InputPipelineConfig.PER_HOST_V1 + distribute_strategy = tf.distribute.MirroredStrategy(devices=None) + effective_train_batch_size *= distribute_strategy.num_replicas_in_sync + # effective_eval_batch_size *= distribute_strategy.num_replicas_in_sync + + is_per_host = tf.compat.v1.estimator.tpu.InputPipelineConfig.PER_HOST_V2 + run_config = tf.compat.v1.estimator.tpu.RunConfig( + cluster=tpu_cluster_resolver, + master=config["master"], + model_dir=config["output_dir"], + save_checkpoints_steps=config["save_checkpoints_steps"], + keep_checkpoint_max=config["keep_checkpoint_max"], + train_distribute=distribute_strategy, + session_config=session_config, + tpu_config=tf.compat.v1.estimator.tpu.TPUConfig( + tpu_job_name=config["tpu_job_name"], + iterations_per_loop=config["iterations_per_loop"], + num_shards=config["num_tpu_cores"], + per_host_input_for_training=is_per_host, + eval_training_input_configuration=sliced_eval_mode, + ), + ) + + # NOTE fixed by @monologg + if config["init_checkpoint"]: + ckpt_var_list = tf.compat.v1.train.list_variables(config["init_checkpoint"]) + ckpt_var_list = { + name: shape + for name, shape in ckpt_var_list + if not re.findall( + "(Adam|Adafactor|global_step|adam_m|adam_v|AdamWeightDecayOptimizer|AdamWeightDecayOptimizer_1)", name + ) + } + ckpt_var_list["bert/encoder/LayerNorm/beta"] = ckpt_var_list["bert/embeddings/LayerNorm/beta"] + ckpt_var_list["bert/encoder/LayerNorm/gamma"] = ckpt_var_list["bert/embeddings/LayerNorm/gamma"] + + # Because orignal bert's pos emb is 512, we random init pos emb which is bigger than 512 (e.g. 
4096) + if config["random_pos_emb"]: + logging.info("*" * 40) + logging.info("Random initialize the positional embedding!!") + logging.info("*" * 40) + filtered_ckpt_var_list = [] + for ckpt_var in ckpt_var_list.keys(): + if "embeddings/position_embeddings" not in ckpt_var: + filtered_ckpt_var_list.append(ckpt_var) + vars_to_warm_start = "({})".format("|".join(filtered_ckpt_var_list)) + else: + vars_to_warm_start = "({})".format("|".join(ckpt_var_list.keys())) + warm_start_settings = tf.estimator.WarmStartSettings( + ckpt_to_initialize_from=config["init_checkpoint"], + vars_to_warm_start=vars_to_warm_start, + var_name_to_prev_var_name={ + "bert/encoder/LayerNorm/beta": "bert/embeddings/LayerNorm/beta", + "bert/encoder/LayerNorm/gamma": "bert/embeddings/LayerNorm/gamma", + }, + ) + else: + ckpt_var_list = {} + warm_start_settings = None + config["ckpt_var_list"] = ckpt_var_list + + # If no TPU, this will fall back to normal Estimator on CPU or GPU. + estimator = tf.compat.v1.estimator.tpu.TPUEstimator( + use_tpu=config["use_tpu"], + model_fn=model_fn, + config=run_config, + train_batch_size=config_train_batch_size, + eval_batch_size=config_eval_batch_size, + warm_start_from=warm_start_settings, + ) + + # assign batch sizes + estimator.train_batch_size = effective_train_batch_size + estimator.eval_batch_size = effective_eval_batch_size + + return estimator + + +def log_variables(variables, ckpt_var_list): + """Log trainable variables.""" + logging.info("**** Trainable Variables ****") + + model_var_list = {var.name: var.get_shape().as_list() for var in variables} + num_params = sum(np.prod(shape) for shape in model_var_list.values()) + length = max(len(name) for name in model_var_list) + 2 + line = "{{:<{}}}{{:<13}}{{}}".format(length) + + logging.info("The model has {} trainable variables " "({:,} parameters):\n".format(len(model_var_list), num_params)) + logging.info(line.format("Name", "Initialized", "Shape")) + logging.info(line.format("----", "-----------", "-----")) + + ckpt_var_list = ckpt_var_list.copy() + for name, shape in model_var_list.items(): + name = name.split(":")[0] + if name in ckpt_var_list: + warm_started = "from ckpt" + del ckpt_var_list[name] + else: + warm_started = "random" + logging.info(line.format(name, warm_started, shape)) + + if ckpt_var_list: + logging.warning( + "The warm start checkpoint contained %d variables that were not used " "for the model:\n", + len(ckpt_var_list), + ) + for name, shape in ckpt_var_list.items(): + logging.warning(line.format(name, "not used", shape)) + + +def add_scalars_to_summary(summary_dir, scalar_tensors_dict): + """Creates a host_call function that writes summaries on TPU.""" + + # All tensors outfed from TPU should preserve batch size dimension. 
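+    # (Each scalar is therefore reshaped to shape [1] below, and `host_call_fn`
+    # averages over that axis with `tf.reduce_mean` before writing the summary.)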
+ scalar_tensors_dict = {k: tf.reshape(v, [1]) for k, v in scalar_tensors_dict.items()} + + def host_call_fn(**kwargs): + writer = tf.summary.create_file_writer(summary_dir, max_queue=1000) + always_record = tf.summary.record_if(True) + with writer.as_default(), always_record: + for name, scalar in kwargs.items(): + tf.summary.scalar( + name, + tf.reduce_mean(scalar), + tf.compat.v1.train.get_or_create_global_step(), + ) + return tf.compat.v1.summary.all_v2_summary_ops() + + return host_call_fn, scalar_tensors_dict + + +########################## DEFAULT CONFIG UTILS ################################ + + +def get_default_config(): + """Default values for BigBird.""" + + default_config = { + # transformer basic configs + "attention_probs_dropout_prob": 0.1, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "max_position_embeddings": 4096, + "num_attention_heads": 12, + "num_hidden_layers": 12, + "type_vocab_size": 2, + "use_bias": True, + "rescale_embedding": False, + "scope": "bert", + # sparse mask configs + "attention_type": "block_sparse", + "norm_type": "postnorm", + "block_size": 16, + "num_rand_blocks": 3, + # common bert configs + "max_encoder_length": 1024, + "max_decoder_length": 64, + "couple_encoder_decoder": False, + "beam_size": 5, + "alpha": 0.7, + "label_smoothing": 0.1, + "weight_decay_rate": 0.01, + "optimizer_beta1": 0.9, + "optimizer_beta2": 0.999, + "optimizer_epsilon": 1e-6, + # TPU settings + "use_tpu": True, + "tpu_name": None, + "tpu_zone": None, + "tpu_job_name": None, + "gcp_project": None, + "master": None, + "num_tpu_cores": 8, + "iterations_per_loop": "1000", + } + + return default_config diff --git a/pretrain/requirements.txt b/pretrain/requirements.txt new file mode 100644 index 0000000..109b189 --- /dev/null +++ b/pretrain/requirements.txt @@ -0,0 +1,4 @@ +absl-py +tensorflow==2.3.1 +transformers==4.11.3 +ko_lm_dataformat>=0.1.0 diff --git a/pretrain/run_pretraining.py b/pretrain/run_pretraining.py new file mode 100644 index 0000000..eb5d231 --- /dev/null +++ b/pretrain/run_pretraining.py @@ -0,0 +1,529 @@ +# Copyright 2021 The BigBird Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Run masked LM/next sentence pre-training for BigBird.""" + +import os +import time + +import tensorflow.compat.v2 as tf +from absl import app, logging +from kobigbird import flags, modeling, optimization, training_utils, utils + +FLAGS = flags.FLAGS + +## Required parameters + +flags.DEFINE_string("data_dir", "pretrain_tfrecords", "The input data dir. Should contain the TFRecord files.") + +flags.DEFINE_string( + "output_dir", + "/tmp/bigb", + "The output directory where the model checkpoints will be written.", +) + +## Other parameters +flags.DEFINE_string( + "init_checkpoint", + None, + "Initial checkpoint (usually from a pre-trained BERT model).", +) + +flags.DEFINE_integer( + "max_encoder_length", + 4096, + "The maximum total input sequence length after BERT Wordpiece tokenization. 
" + "Sequences longer than this will be truncated, and sequences shorter " + "than this will be padded.", +) + +flags.DEFINE_integer( + "max_predictions_per_seq", + 640, + "Maximum number of masked LM predictions per sequence.", +) + +flags.DEFINE_float("masked_lm_prob", 0.15, "Masked LM probability.") + +flags.DEFINE_bool("do_train", True, "Whether to run training.") + +flags.DEFINE_bool("do_eval", False, "Whether to run eval on the dev set.") + +flags.DEFINE_integer( + "train_batch_size", + 4, + "Local batch size for training. " "Total batch size will be multiplied by number gpu/tpu cores available.", +) + +flags.DEFINE_integer( + "eval_batch_size", + 4, + "Local batch size for eval. " "Total batch size will be multiplied by number gpu/tpu cores available.", +) + +flags.DEFINE_string( + "optimizer", + "AdamWeightDecay", + "Optimizer to use. Can be Adafactor, Adam, and AdamWeightDecay.", +) + +flags.DEFINE_float("learning_rate", 1e-4, "The initial learning rate for Adam.") + +flags.DEFINE_integer("num_train_steps", 100000, "Total number of training steps to perform.") + +flags.DEFINE_integer("num_warmup_steps", 10000, "Number of steps to perform linear warmup.") + +flags.DEFINE_integer("save_checkpoints_steps", 1000, "How often to save the model checkpoint.") + +flags.DEFINE_integer("max_eval_steps", 100, "Maximum number of eval steps.") + +flags.DEFINE_bool("use_nsp", False, "Whether to use next sentence prediction loss.") + +flags.DEFINE_integer("keep_checkpoint_max", 5, "How many checkpoints to keep.") + +flags.DEFINE_bool( + "random_pos_emb", True, "Random initialize for positional embedding (original 512 doesn't matched to 4096)" +) + +flags.DEFINE_integer("seed", 42, "Seed for Pretraining") + + +def input_fn_builder( + data_dir, + max_encoder_length, + max_predictions_per_seq, + is_training, +): + """Creates an `input_fn` closure to be passed to TPUEstimator.""" + + feature_shapes = { + "input_ids": [max_encoder_length], + "segment_ids": [max_encoder_length], + "masked_lm_positions": [max_predictions_per_seq], + "masked_lm_ids": [max_predictions_per_seq], + "masked_lm_weights": [max_predictions_per_seq], + "next_sentence_labels": [1], + } + + def _decode_record(record): + """Decodes a record to a TensorFlow example.""" + name_to_features = { + "input_ids": tf.io.FixedLenFeature([max_encoder_length], tf.int64), + "segment_ids": tf.io.FixedLenFeature([max_encoder_length], tf.int64), + "masked_lm_positions": tf.io.FixedLenFeature([max_predictions_per_seq], tf.int64), + "masked_lm_ids": tf.io.FixedLenFeature([max_predictions_per_seq], tf.int64), + "masked_lm_weights": tf.io.FixedLenFeature([max_predictions_per_seq], tf.float32), + "next_sentence_labels": tf.io.FixedLenFeature([1], tf.int64), # NOTE Not using NSP task + } + example = tf.io.parse_single_example(record, name_to_features) + + # tf.Example only supports tf.int64, but the TPU only supports tf.int32. + # So cast all int64 to int32. + for name in list(example.keys()): + t = example[name] + if t.dtype == tf.int64: + t = tf.cast(t, tf.int32) + example[name] = t + + return example + + def input_fn(params): + """The actual input function.""" + batch_size = params["batch_size"] + + # Load dataset and handle tfds separately + if "tfds://" == data_dir[:7]: + raise ValueError("We don't support tfds in kobigbird pretraining code") + else: + # NOTE Directly read TFRecord + input_files = tf.io.gfile.glob(os.path.join(data_dir, "pretrain_data.tfrecord*")) + + # For training, we want a lot of parallel reading and shuffling. 
+ # For eval, we want no shuffling and parallel reading doesn't matter. + if is_training: + d = tf.data.Dataset.from_tensor_slices(tf.constant(input_files)) + d = d.shuffle(buffer_size=len(input_files)) + + # Non deterministic mode means that the interleaving is not exact. + # This adds even more randomness to the training pipeline. + d = d.interleave( + tf.data.TFRecordDataset, + deterministic=False, + num_parallel_calls=tf.data.experimental.AUTOTUNE, + ) + else: + d = tf.data.TFRecordDataset(input_files) + + # NOTE Only accept preprocessed tfrecord + d = d.map(_decode_record, num_parallel_calls=tf.data.experimental.AUTOTUNE) + + if is_training: + d = d.shuffle(buffer_size=10000, reshuffle_each_iteration=True) + d = d.repeat() + + d = d.padded_batch(batch_size, feature_shapes, drop_remainder=True) # For static shape + return d + + return input_fn + + +def model_fn_builder(bert_config): + """Returns `model_fn` closure for TPUEstimator.""" + + def model_fn(features, labels, mode, params): # pylint: disable=unused-argument + """The `model_fn` for TPUEstimator.""" + + is_training = mode == tf.estimator.ModeKeys.TRAIN + + model = modeling.BertModel(bert_config) + masked_lm = MaskedLMLayer( + bert_config["hidden_size"], + bert_config["vocab_size"], + model.embeder, + initializer=utils.create_initializer(bert_config["initializer_range"]), + activation_fn=utils.get_activation(bert_config["hidden_act"]), + ) + next_sentence = NSPLayer( + bert_config["hidden_size"], + initializer=utils.create_initializer(bert_config["initializer_range"]), + ) + + sequence_output, pooled_output = model( + features["input_ids"], + training=is_training, + token_type_ids=features.get("segment_ids"), + ) + + masked_lm_loss, masked_lm_log_probs = masked_lm( + sequence_output, + label_ids=features.get("masked_lm_ids"), + label_weights=features.get("masked_lm_weights"), + masked_lm_positions=features.get("masked_lm_positions"), + ) + + if bert_config["use_nsp"]: + next_sentence_loss, next_sentence_log_probs = next_sentence( + pooled_output, features.get("next_sentence_labels") + ) + total_loss = masked_lm_loss + next_sentence_loss + else: + total_loss = masked_lm_loss + + tvars = tf.compat.v1.trainable_variables() + utils.log_variables(tvars, bert_config["ckpt_var_list"]) + + output_spec = None + if mode == tf.estimator.ModeKeys.TRAIN: + + learning_rate = optimization.get_linear_warmup_linear_decay_lr( + init_lr=bert_config["learning_rate"], + num_train_steps=bert_config["num_train_steps"], + num_warmup_steps=bert_config["num_warmup_steps"], + ) + + optimizer = optimization.get_optimizer(bert_config, learning_rate) + + global_step = tf.compat.v1.train.get_global_step() + + gradients = optimizer.compute_gradients(total_loss, tvars) + train_op = optimizer.apply_gradients(gradients, global_step=global_step) + + output_spec = tf.compat.v1.estimator.tpu.TPUEstimatorSpec( + mode=mode, + loss=total_loss, + train_op=train_op, + host_call=utils.add_scalars_to_summary(bert_config["output_dir"], {"learning_rate": learning_rate}), + training_hooks=[ + training_utils.ETAHook( + {} if bert_config["use_tpu"] else dict(loss=total_loss), + bert_config["num_train_steps"], + bert_config["iterations_per_loop"], + bert_config["use_tpu"], + ) + ], + ) + + elif mode == tf.estimator.ModeKeys.EVAL: + + def metric_fn( + masked_lm_loss_value, + masked_lm_log_probs, + masked_lm_ids, + masked_lm_weights, + next_sentence_loss_value, + next_sentence_log_probs, + next_sentence_labels, + ): + """Computes the loss and accuracy of the model.""" + 
masked_lm_predictions = tf.argmax(masked_lm_log_probs, axis=-1, output_type=tf.int32) + masked_lm_accuracy = tf.compat.v1.metrics.accuracy( + labels=masked_lm_ids, + predictions=masked_lm_predictions, + weights=masked_lm_weights, + ) + masked_lm_mean_loss = tf.compat.v1.metrics.mean(values=masked_lm_loss_value) + + next_sentence_predictions = tf.argmax(next_sentence_log_probs, axis=-1, output_type=tf.int32) + next_sentence_accuracy = tf.compat.v1.metrics.accuracy( + labels=next_sentence_labels, predictions=next_sentence_predictions + ) + next_sentence_mean_loss = tf.compat.v1.metrics.mean(values=next_sentence_loss_value) + + return { + "masked_lm_accuracy": masked_lm_accuracy, + "masked_lm_loss": masked_lm_mean_loss, + "next_sentence_accuracy": next_sentence_accuracy, + "next_sentence_loss": next_sentence_mean_loss, + } + + eval_metrics = ( + metric_fn, + [ + masked_lm_loss, + masked_lm_log_probs, + features["masked_lm_ids"], + features["masked_lm_weights"], + next_sentence_loss, + next_sentence_log_probs, + features["next_sentence_labels"], + ], + ) + output_spec = tf.compat.v1.estimator.tpu.TPUEstimatorSpec( + mode=mode, + loss=total_loss, + eval_metrics=eval_metrics, + training_hooks=[ + training_utils.ETAHook( + {} if bert_config["use_tpu"] else dict(loss=total_loss), + bert_config["max_eval_steps"], + bert_config["iterations_per_loop"], + bert_config["use_tpu"], + is_training=False, + ) + ], + ) + else: + + output_spec = tf.compat.v1.estimator.tpu.TPUEstimatorSpec( + mode=mode, + predictions={ + "log-probabilities": masked_lm_log_probs, + "seq-embeddings": sequence_output, + }, + ) + + return output_spec + + return model_fn + + +class MaskedLMLayer(tf.keras.layers.Layer): + """Get loss and log probs for the masked LM.""" + + def __init__( + self, + hidden_size, + vocab_size, + embeder, + initializer=None, + activation_fn=None, + name="cls/predictions", + ): + super(MaskedLMLayer, self).__init__(name=name) + self.hidden_size = hidden_size + self.vocab_size = vocab_size + self.embeder = embeder + + # NOTE fixed by @monologg + # change the name of scope for BERT init checkpoint + with tf.compat.v1.variable_scope(name): + # We apply one more non-linear transformation before the output layer. + # This matrix is not used after pre-training. + self.extra_layer = utils.Dense2dLayer( + hidden_size, hidden_size, initializer, activation_fn, name="transform/dense" + ) + self.norm_layer = utils.NormLayer(hidden_size, name="transform/LayerNorm") + + # The output weights are the same as the input embeddings, but there is + # an output-only bias for each token. + self.output_bias = tf.compat.v1.get_variable( + "output_bias", shape=[vocab_size], initializer=tf.zeros_initializer() + ) + + @property + def trainable_weights(self): + self._trainable_weights = ( + self.extra_layer.trainable_weights + self.norm_layer.trainable_weights + [self.output_bias] + ) + return self._trainable_weights + + def call(self, input_tensor, label_ids=None, label_weights=None, masked_lm_positions=None): + if masked_lm_positions is not None: + input_tensor = tf.gather(input_tensor, masked_lm_positions, batch_dims=1) + + # We apply one more non-linear transformation before the output layer. + # This matrix is not used after pre-training. + input_tensor = self.extra_layer(input_tensor) + input_tensor = self.norm_layer(input_tensor) + + # The output weights are the same as the input embeddings, but there is + # an output-only bias for each token. 
+ logits = self.embeder.linear(input_tensor) + logits = tf.nn.bias_add(logits, self.output_bias) + log_probs = tf.nn.log_softmax(logits, axis=-1) + + if label_ids is not None: + one_hot_labels = tf.one_hot(label_ids, depth=self.vocab_size, dtype=tf.float32) + + # The `positions` tensor might be zero-padded (if the sequence is too + # short to have the maximum number of predictions). The `label_weights` + # tensor has a value of 1.0 for every real prediction and 0.0 for the + # padding predictions. + per_example_loss = -tf.reduce_sum(log_probs * one_hot_labels, axis=-1) + numerator = tf.reduce_sum(label_weights * per_example_loss) + denominator = tf.reduce_sum(label_weights) + 1e-5 + loss = numerator / denominator + else: + loss = tf.constant(0.0) + + return loss, log_probs + + +class NSPLayer(tf.keras.layers.Layer): + """Get loss and log probs for the next sentence prediction.""" + + def __init__(self, hidden_size, initializer=None, name="cls/seq_relationship"): + super(NSPLayer, self).__init__(name=name) + self.hidden_size = hidden_size + + # Simple binary classification. Note that 0 is "next sentence" and 1 is + # "random sentence". This weight matrix is not used after pre-training. + with tf.compat.v1.variable_scope(name): + self.output_weights = tf.compat.v1.get_variable( + "output_weights", shape=[2, hidden_size], initializer=initializer + ) + self._trainable_weights.append(self.output_weights) + self.output_bias = tf.compat.v1.get_variable("output_bias", shape=[2], initializer=tf.zeros_initializer()) + self._trainable_weights.append(self.output_bias) + + def call(self, input_tensor, next_sentence_labels=None): + logits = tf.matmul(input_tensor, self.output_weights, transpose_b=True) + logits = tf.nn.bias_add(logits, self.output_bias) + log_probs = tf.nn.log_softmax(logits, axis=-1) + + if next_sentence_labels is not None: + labels = tf.reshape(next_sentence_labels, [-1]) + one_hot_labels = tf.one_hot(labels, depth=2, dtype=tf.float32) + per_example_loss = -tf.reduce_sum(one_hot_labels * log_probs, axis=-1) + loss = tf.reduce_mean(per_example_loss) + else: + loss = tf.constant(0.0) + return loss, log_probs + + +def main(_): + tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR) + tf.compat.v1.random.set_random_seed(FLAGS.seed) + + if not FLAGS.do_train and not FLAGS.do_eval: + raise ValueError("At least one of `do_train`, `do_eval` must be True.") + + bert_config = flags.as_dictionary() + + if FLAGS.max_encoder_length > bert_config["max_position_embeddings"]: + raise ValueError( + "Cannot use sequence length %d because the BERT model " + "was only trained up to sequence length %d" + % (FLAGS.max_encoder_length, bert_config["max_position_embeddings"]) + ) + + tf.io.gfile.makedirs(FLAGS.output_dir) + if FLAGS.do_train: + # Save pretrain_config.json + flags.save(os.path.join(FLAGS.output_dir, "pretrain_config.json")) + training_utils.sys_log(f"Save pretrain_config.json at `{FLAGS.output_dir}`") + + # Save tokenizer to output dir + tokenizer_output_dir = os.path.join(FLAGS.output_dir, "tokenizer") + if not tf.io.gfile.exists(tokenizer_output_dir): + tf.io.gfile.makedirs(tokenizer_output_dir) + + for filename in os.listdir(FLAGS.tokenizer_dir): + tf.io.gfile.copy( + os.path.join(FLAGS.tokenizer_dir, filename), + os.path.join(tokenizer_output_dir, filename), + ) + else: + training_utils.sys_log(f"Tokenizer is already saved at `{tokenizer_output_dir}`") + + model_fn = model_fn_builder(bert_config) + estimator = utils.get_estimator(bert_config, model_fn) + + if FLAGS.do_train: + 
logging.info("***** Running training *****") + logging.info(" Batch size = %d", estimator.train_batch_size) + logging.info(" Num steps = %d", FLAGS.num_train_steps) + train_input_fn = input_fn_builder( + data_dir=FLAGS.data_dir, + max_encoder_length=FLAGS.max_encoder_length, + max_predictions_per_seq=FLAGS.max_predictions_per_seq, + is_training=True, + ) + estimator.train(input_fn=train_input_fn, max_steps=FLAGS.num_train_steps) + + if FLAGS.do_eval: + logging.info("***** Running evaluation *****") + logging.info(" Batch size = %d", estimator.eval_batch_size) + + eval_input_fn = input_fn_builder( + data_dir=FLAGS.data_dir, + max_encoder_length=FLAGS.max_encoder_length, + max_predictions_per_seq=FLAGS.max_predictions_per_seq, + is_training=False, + ) + + # Run continuous evaluation for latest checkpoint as training progresses. + last_evaluated = None + while True: + latest = tf.train.latest_checkpoint(FLAGS.output_dir) + if latest == last_evaluated: + if not latest: + logging.info("No checkpoints found yet.") + else: + logging.info("Latest checkpoint %s already evaluated.", latest) + time.sleep(300) + continue + else: + logging.info("Evaluating check point %s", latest) + last_evaluated = latest + + current_step = int(os.path.basename(latest).split("-")[1]) + output_eval_file = os.path.join(FLAGS.output_dir, "eval_results_{}.txt".format(current_step)) + result = estimator.evaluate( + input_fn=eval_input_fn, + steps=FLAGS.max_eval_steps, + checkpoint_path=latest, + ) + + with tf.io.gfile.GFile(output_eval_file, "w") as writer: + logging.info("***** Eval results *****") + for key in sorted(result.keys()): + logging.info(" %s = %s", key, str(result[key])) + writer.write("%s = %s\n" % (key, str(result[key]))) + + +if __name__ == "__main__": + tf.compat.v1.disable_v2_behavior() + tf.compat.v1.enable_resource_variables() + app.run(main) diff --git a/pretrain/scripts/base_size_tpu.sh b/pretrain/scripts/base_size_tpu.sh new file mode 100644 index 0000000..8a7f5a0 --- /dev/null +++ b/pretrain/scripts/base_size_tpu.sh @@ -0,0 +1,60 @@ +#!/bin/bash + +### GCP Settings (SHOULD change for your own settings) ### +BUCKET_NAME="kobigbird" +MODEL_NAME="kobigbird-bert-base" +TPU_NAME="kobigbird-bert-base" +TPU_ZONE="europe-west4-a" +NUM_TPU_CORES=8 +########################################### + +ATTENTION_TYPE="block_sparse" +MAX_ENCODER_LENGTH=4096 +BLOCK_SIZE=64 + +NUM_TRAIN_STEPS=2000000 +SAVE_CHECKPOINTS_STEPS=100000 +NUM_WARMUP_STEPS=20000 +ITERATIONS_PER_LOOP=200 +LEARNING_RATE=1e-4 +KEEP_CHECKPOINT_MAX=20 +MAX_PREDICTIONS_PER_SEQ=640 +TRAIN_BATCH_SIZE_PER_DEVICE=4 +EVAL_BATCH_SIZE_PER_DEVICE=8 + +USE_GRADIENT_CHECKPOINTING=False +USE_NSP=False +RANDOM_POS_EMB=True + +SEED=42 + +python3 run_pretraining.py \ + --data_dir="gs://$BUCKET_NAME/pretrain_tfrecords" \ + --output_dir="gs://$BUCKET_NAME/models/$MODEL_NAME" \ + --attention_type=$ATTENTION_TYPE \ + --max_encoder_length=$MAX_ENCODER_LENGTH \ + --max_position_embeddings=$MAX_ENCODER_LENGTH \ + --block_size=$BLOCK_SIZE \ + --num_attention_heads=12 \ + --num_hidden_layers=12 \ + --hidden_size=768 \ + --intermediate_size=3072 \ + --do_train=True \ + --do_eval=False \ + --use_tpu=True \ + --tpu_name=$TPU_NAME \ + --tpu_zone=$TPU_ZONE \ + --num_tpu_cores=$NUM_TPU_CORES \ + --train_batch_size=$TRAIN_BATCH_SIZE_PER_DEVICE \ + --eval_batch_size=$EVAL_BATCH_SIZE_PER_DEVICE \ + --num_train_steps=$NUM_TRAIN_STEPS \ + --num_warmup_steps=$NUM_WARMUP_STEPS \ + --save_checkpoints_steps=$SAVE_CHECKPOINTS_STEPS \ + --learning_rate=$LEARNING_RATE \ + 
+python3 run_pretraining.py \
+    --data_dir="gs://$BUCKET_NAME/pretrain_tfrecords" \
+    --output_dir="gs://$BUCKET_NAME/models/$MODEL_NAME" \
+    --attention_type=$ATTENTION_TYPE \
+    --max_encoder_length=$MAX_ENCODER_LENGTH \
+    --max_position_embeddings=$MAX_ENCODER_LENGTH \
+    --block_size=$BLOCK_SIZE \
+    --num_attention_heads=12 \
+    --num_hidden_layers=12 \
+    --hidden_size=768 \
+    --intermediate_size=3072 \
+    --do_train=True \
+    --do_eval=False \
+    --use_tpu=True \
+    --tpu_name=$TPU_NAME \
+    --tpu_zone=$TPU_ZONE \
+    --num_tpu_cores=$NUM_TPU_CORES \
+    --train_batch_size=$TRAIN_BATCH_SIZE_PER_DEVICE \
+    --eval_batch_size=$EVAL_BATCH_SIZE_PER_DEVICE \
+    --num_train_steps=$NUM_TRAIN_STEPS \
+    --num_warmup_steps=$NUM_WARMUP_STEPS \
+    --save_checkpoints_steps=$SAVE_CHECKPOINTS_STEPS \
+    --learning_rate=$LEARNING_RATE \
+    --keep_checkpoint_max=$KEEP_CHECKPOINT_MAX \
+    --use_gradient_checkpointing=$USE_GRADIENT_CHECKPOINTING \
+    --use_nsp=$USE_NSP \
+    --max_predictions_per_seq=$MAX_PREDICTIONS_PER_SEQ \
+    --random_pos_emb=$RANDOM_POS_EMB \
+    --iterations_per_loop=$ITERATIONS_PER_LOOP \
+    --seed=$SEED
diff --git a/pretrain/scripts/build_tfrecord.sh b/pretrain/scripts/build_tfrecord.sh
new file mode 100644
index 0000000..2b79196
--- /dev/null
+++ b/pretrain/scripts/build_tfrecord.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+### SHOULD change for your own settings ###
+INPUT_DIR=data
+OUTPUT_DIR=pretrain_tfrecords
+TOKENIZER_DIR=tokenizer
+###########################################
+
+NUM_PROCESSES=0
+DUPE_FACTOR=3
+
+MAX_SEQ_LENGTH=4096
+MASKED_LM_PROB=0.15
+MAX_PREDICTIONS_PER_SEQ=640
+
+SENTENCE_PAIR_PROB=0.0
+SHORT_SEQ_PROB=0.0
+MAX_NGRAM_SIZE=2
+
+LONG_SEQ_THRESHOLD=1.8
+
+python3 create_pretraining_data.py \
+    --input_dir $INPUT_DIR \
+    --tokenizer_dir $TOKENIZER_DIR \
+    --output_dir $OUTPUT_DIR \
+    --max_seq_length $MAX_SEQ_LENGTH \
+    --max_predictions_per_seq $MAX_PREDICTIONS_PER_SEQ \
+    --num_processes $NUM_PROCESSES \
+    --masked_lm_prob $MASKED_LM_PROB \
+    --sentence_pair_prob $SENTENCE_PAIR_PROB \
+    --short_seq_prob $SHORT_SEQ_PROB \
+    --do_whole_word_mask \
+    --max_ngram_size $MAX_NGRAM_SIZE \
+    --dupe_factor $DUPE_FACTOR \
+    --long_seq_threshold $LONG_SEQ_THRESHOLD \
+    --debug
diff --git a/pretrain/tokenizer/special_tokens_map.json b/pretrain/tokenizer/special_tokens_map.json
new file mode 100644
index 0000000..f40edd4
--- /dev/null
+++ b/pretrain/tokenizer/special_tokens_map.json
@@ -0,0 +1,9 @@
+{
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "unk_token": "[UNK]",
+  "sep_token": "[SEP]",
+  "pad_token": "[PAD]",
+  "cls_token": "[CLS]",
+  "mask_token": "[MASK]"
+}
diff --git a/pretrain/tokenizer/tokenizer_config.json b/pretrain/tokenizer/tokenizer_config.json
new file mode 100644
index 0000000..1ee2897
--- /dev/null
+++ b/pretrain/tokenizer/tokenizer_config.json
@@ -0,0 +1,16 @@
+{
+  "do_lower_case": false,
+  "do_basic_tokenize": true,
+  "never_split": null,
+  "unk_token": "[UNK]",
+  "sep_token": "[SEP]",
+  "pad_token": "[PAD]",
+  "cls_token": "[CLS]",
+  "mask_token": "[MASK]",
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "tokenize_chinese_chars": true,
+  "strip_accents": null,
+  "model_max_length": 4096,
+  "tokenizer_class": "BertTokenizer"
+}
diff --git a/pretrain/tokenizer/vocab.txt b/pretrain/tokenizer/vocab.txt
new file mode 100644
index 0000000..fd409a4
--- /dev/null
+++ b/pretrain/tokenizer/vocab.txt
@@ -0,0 +1,32500 @@
+[PAD] +[UNK] +[CLS] +[SEP] +[MASK] +<s> +</s> +[unused2] +[unused3] +[unused4] +[unused5] +[unused6] +[unused7] +[unused8] +[unused9] +[unused10] +[unused11] +[unused12] +[unused13] +[unused14] +[unused15] +[unused16] +[unused17] +[unused18] +[unused19] +[unused20] +[unused21] +[unused22] +[unused23] +[unused24] +[unused25] +[unused26] +[unused27] +[unused28] +[unused29] +[unused30] +[unused31] +[unused32] +[unused33] +[unused34] +[unused35] +[unused36] +[unused37] +[unused38] +[unused39] +[unused40] +[unused41] +[unused42] +[unused43] +[unused44] +[unused45] +[unused46] +[unused47] +[unused48] +[unused49] +[unused50] +[unused51] +[unused52] +[unused53] +[unused54] +[unused55] +[unused56] +[unused57] +[unused58] +[unused59] +[unused60] +[unused61] +[unused62] +[unused63] +[unused64] +[unused65] +[unused66] +[unused67] +[unused68] +[unused69] +[unused70] +[unused71] +[unused72] +[unused73] +[unused74] +[unused75] +[unused76] +[unused77] +[unused78] +[unused79] +[unused80] +[unused81] +[unused82]
+[unused83] +[unused84] +[unused85] +[unused86] +[unused87] +[unused88] +[unused89] +[unused90] +[unused91] +[unused92] +[unused93] +[unused94] +[unused95] +[unused96] +[unused97] +[unused98] +[unused99] +[unused100] +[unused101] +[unused102] +[unused103] +[unused104] +[unused105] +[unused106] +[unused107] +[unused108] +[unused109] +[unused110] +[unused111] +[unused112] +[unused113] +[unused114] +[unused115] +[unused116] +[unused117] +[unused118] +[unused119] +[unused120] +[unused121] +[unused122] +[unused123] +[unused124] +[unused125] +[unused126] +[unused127] +[unused128] +[unused129] +[unused130] +[unused131] +[unused132] +[unused133] +[unused134] +[unused135] +[unused136] +[unused137] +[unused138] +[unused139] +[unused140] +[unused141] +[unused142] +[unused143] +[unused144] +[unused145] +[unused146] +[unused147] +[unused148] +[unused149] +[unused150] +[unused151] +[unused152] +[unused153] +[unused154] +[unused155] +[unused156] +[unused157] +[unused158] +[unused159] +[unused160] +[unused161] +[unused162] +[unused163] +[unused164] +[unused165] +[unused166] +[unused167] +[unused168] +[unused169] +[unused170] +[unused171] +[unused172] +[unused173] +[unused174] +[unused175] +[unused176] +[unused177] +[unused178] +[unused179] +[unused180] +[unused181] +[unused182] +[unused183] +[unused184] +[unused185] +[unused186] +[unused187] +[unused188] +[unused189] +[unused190] +[unused191] +[unused192] +[unused193] +[unused194] +[unused195] +[unused196] +[unused197] +[unused198] +[unused199] +[unused200] +[unused201] +[unused202] +[unused203] +[unused204] +[unused205] +[unused206] +[unused207] +[unused208] +[unused209] +[unused210] +[unused211] +[unused212] +[unused213] +[unused214] +[unused215] +[unused216] +[unused217] +[unused218] +[unused219] +[unused220] +[unused221] +[unused222] +[unused223] +[unused224] +[unused225] +[unused226] +[unused227] +[unused228] +[unused229] +[unused230] +[unused231] +[unused232] +[unused233] +[unused234] +[unused235] +[unused236] +[unused237] +[unused238] +[unused239] +[unused240] +[unused241] +[unused242] +[unused243] +[unused244] +[unused245] +[unused246] +[unused247] +[unused248] +[unused249] +[unused250] +[unused251] +[unused252] +[unused253] +[unused254] +[unused255] +[unused256] +[unused257] +[unused258] +[unused259] +[unused260] +[unused261] +[unused262] +[unused263] +[unused264] +[unused265] +[unused266] +[unused267] +[unused268] +[unused269] +[unused270] +[unused271] +[unused272] +[unused273] +[unused274] +[unused275] +[unused276] +[unused277] +[unused278] +[unused279] +[unused280] +[unused281] +[unused282] +[unused283] +[unused284] +[unused285] +[unused286] +[unused287] +[unused288] +[unused289] +[unused290] +[unused291] +[unused292] +[unused293] +[unused294] +[unused295] +[unused296] +[unused297] +[unused298] +[unused299] +[unused300] +[unused301] +[unused302] +[unused303] +[unused304] +[unused305] +[unused306] +[unused307] +[unused308] +[unused309] +[unused310] +[unused311] +[unused312] +[unused313] +[unused314] +[unused315] +[unused316] +[unused317] +[unused318] +[unused319] +[unused320] +[unused321] +[unused322] +[unused323] +[unused324] +[unused325] +[unused326] +[unused327] +[unused328] +[unused329] +[unused330] +[unused331] +[unused332] +[unused333] +[unused334] +[unused335] +[unused336] +[unused337] +[unused338] +[unused339] +[unused340] +[unused341] +[unused342] +[unused343] +[unused344] +[unused345] +[unused346] +[unused347] +[unused348] +[unused349] +[unused350] +[unused351] +[unused352] +[unused353] +[unused354] +[unused355] +[unused356] 
+[unused357] +[unused358] +[unused359] +[unused360] +[unused361] +[unused362] +[unused363] +[unused364] +[unused365] +[unused366] +[unused367] +[unused368] +[unused369] +[unused370] +[unused371] +[unused372] +[unused373] +[unused374] +[unused375] +[unused376] +[unused377] +[unused378] +[unused379] +[unused380] +[unused381] +[unused382] +[unused383] +[unused384] +[unused385] +[unused386] +[unused387] +[unused388] +[unused389] +[unused390] +[unused391] +[unused392] +[unused393] +[unused394] +[unused395] +[unused396] +[unused397] +[unused398] +[unused399] +[unused400] +[unused401] +[unused402] +[unused403] +[unused404] +[unused405] +[unused406] +[unused407] +[unused408] +[unused409] +[unused410] +[unused411] +[unused412] +[unused413] +[unused414] +[unused415] +[unused416] +[unused417] +[unused418] +[unused419] +[unused420] +[unused421] +[unused422] +[unused423] +[unused424] +[unused425] +[unused426] +[unused427] +[unused428] +[unused429] +[unused430] +[unused431] +[unused432] +[unused433] +[unused434] +[unused435] +[unused436] +[unused437] +[unused438] +[unused439] +[unused440] +[unused441] +[unused442] +[unused443] +[unused444] +[unused445] +[unused446] +[unused447] +[unused448] +[unused449] +[unused450] +[unused451] +[unused452] +[unused453] +[unused454] +[unused455] +[unused456] +[unused457] +[unused458] +[unused459] +[unused460] +[unused461] +[unused462] +[unused463] +[unused464] +[unused465] +[unused466] +[unused467] +[unused468] +[unused469] +[unused470] +[unused471] +[unused472] +[unused473] +[unused474] +[unused475] +[unused476] +[unused477] +[unused478] +[unused479] +[unused480] +[unused481] +[unused482] +[unused483] +[unused484] +[unused485] +[unused486] +[unused487] +[unused488] +[unused489] +[unused490] +[unused491] +[unused492] +[unused493] +[unused494] +[unused495] +[unused496] +[unused497] +[unused498] +[unused499] +! +" +# +$ +% +& +' +( +) +* ++ +, +- +. +/ +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +: +; +< += +> +? 
+@ +A +B +C +D +E +F +G +H +I +J +K +L +M +N +O +P +Q +R +S +T +U +V +W +X +Y +Z +[ +\ +] +^ +_ +` +a +b +c +d +e +f +g +h +i +j +k +l +m +n +o +p +q +r +s +t +u +v +w +x +y +z +{ +| +} +~ +ยง +ยซ +ยฐ +ยฑ +ยฒ +ยณ +ยด +ยท +ยน +ยป +ยฝ +ร‰ +ร— +ร  +รก +รข +รค +รง +รจ +รฉ +รญ +รณ +รถ +รท +รผ +ฤ +ฤซ +ห +ห˜ +หš +ห +ฬ +ฬˆ +ฬฃ +ฬฅ +ฮ” +ฮฑ +ฮฒ +ฮณ +ฮด +ฮต +ฮท +ฮธ +ฮน +ฮบ +ฮป +ฮผ +ฮฝ +ฮฟ +ฯ€ +ฯ +ฯ‚ +ฯƒ +ฯ„ +ฯ‰ +ะ +ะ’ +ะš +ะœ +ะŸ +ะ  +ะก +ะฐ +ะฑ +ะฒ +ะณ +ะด +ะต +ะถ +ะท +ะธ +ะน +ะบ +ะป +ะผ +ะฝ +ะพ +ะฟ +ั€ +ั +ั‚ +ัƒ +ั… +ั† +ั‡ +ัˆ +ั‹ +ัŒ +ั +ุง +ู„ +ูˆ +เท† +เน‘ +แƒฆ +แ„’ +แ†ž +แ†ก +โ€“ +โ€” +โ€• +โ€˜ +โ€™ +โ€œ +โ€ +โ€ข +โ€ค +โ€ฅ +โ€ฆ +โ€ง +โ€ฒ +โ€ป +โ€ผ +โ‚ +โ‚‚ +โ‚ฉ +โ‚ฌ +โ„ƒ +โ„“ +โ„ข +โ…“ +โ…  +โ…ก +โ…ข +โ…ฃ +โ…ค +โ…ฅ +โ† +โ†‘ +โ†’ +โ†“ +โ†” +โ‡’ +โˆ’ +โˆ™ +โˆผ +โ‰ช +โ‰ซ +โŠ™ +โ‹… +โ‘  +โ‘ก +โ‘ข +โ‘ฃ +โ‘ค +โ‘ฅ +โ‘ฆ +โ‘ง +โ‘จ +โ‘ฉ +โ‘ช +โ‘ซ +โ‘ด +โ‘ต +โ‘ถ +โ‘ท +โ“ +โ“‘ +โ“’ +โ”€ +โ” +โ”‚ +โ”ƒ +โ”Œ +โ”” +โ”œ +โ”ฌ +โ–  +โ–ก +โ–ฃ +โ–จ +โ–ช +โ–ซ +โ–ฒ +โ–ณ +โ–ถ +โ–ท +โ–ผ +โ–ฝ +โ—€ +โ—† +โ—‡ +โ—ˆ +โ—‹ +โ—Ž +โ— +โ—ก +โ˜… +โ˜† +โ˜Ž +โ˜ +โ˜• +โ˜ž +โ˜บ +โ™€ +โ™‚ +โ™ก +โ™ฃ +โ™ค +โ™ฅ +โ™ฉ +โ™ช +โ™ฌ +โœ… +โœŒ +โœ” +โœจ +โ— +โฃ +โค +โžก +โ € +โญ +ใ€ +ใ€‚ +ใ€ƒ +ใ€ˆ +ใ€‰ +ใ€Š +ใ€‹ +ใ€Œ +ใ€ +ใ€Ž +ใ€ +ใ€ +ใ€‘ +ใ€“ +ใ€” +ใ€• +ใ€œ +ใ€ฐ +ใ‚ +ใ„ +ใ† +ใˆ +ใŠ +ใ‹ +ใŒ +ใ +ใ +ใ‘ +ใ“ +ใ• +ใ— +ใ™ +ใŸ +ใ  +ใก +ใฃ +ใค +ใฆ +ใง +ใจ +ใช +ใซ +ใฌ +ใฎ +ใฏ +ใพ +ใฟ +ใ‚‚ +ใ‚„ +ใ‚ˆ +ใ‚‰ +ใ‚Š +ใ‚‹ +ใ‚Œ +ใ‚ +ใ‚’ +ใ‚“ +ใ‚ข +ใ‚ฃ +ใ‚ค +ใ‚ซ +ใ‚ญ +ใ‚ฏ +ใ‚ฐ +ใ‚ณ +ใ‚ท +ใ‚ธ +ใ‚น +ใ‚ฟ +ใƒ +ใƒƒ +ใƒ† +ใƒˆ +ใƒ‰ +ใƒ‹ +ใƒŽ +ใƒ• +ใƒ– +ใƒ +ใƒž +ใƒ  +ใƒก +ใƒฉ +ใƒช +ใƒซ +ใƒฌ +ใƒญ +ใƒณ +ใƒป +ใƒผ +ใ„ฑ +ใ„ฒ +ใ„ด +ใ„ท +ใ„น +ใ… +ใ…‚ +ใ…ƒ +ใ…… +ใ…† +ใ…‡ +ใ…ˆ +ใ…‰ +ใ…Š +ใ…‹ +ใ…Œ +ใ… +ใ…Ž +ใ… +ใ… +ใ…‘ +ใ…“ +ใ…” +ใ…• +ใ…— +ใ…› +ใ…œ +ใ…  +ใ…ก +ใ…ฃ +ใ…ค +ใ† +ใˆŽ +ใˆ +ใˆ +ใˆ‘ +ใˆ” +ใˆœ +ใ‰  +ใ‰ก +ใ‰ข +ใ‰ฃ +ใ‰ฎ +ใ‰ฏ +ใ‰ฐ +ใŽ‰ +ใŽ +ใŽŽ +ใŽ +ใŽ’ +ใŽ“ +ใŽ– +ใŽ› +ใŽœ +ใŽ +ใŽž +ใŽก +ใŽข +ใŽฅ +ใŽพ +ใŽฟ +ใ„ +ใˆ +ใŠ +ไธ€ +ไธ +ไธƒ +ไธˆ +ไธ‰ +ไธŠ +ไธ‹ +ไธ +ไธ‘ +ไธ” +ไธ– +ไธ˜ +ไธ™ +ไธž +ไธญ +ไธธ +ไธน +ไธป +ไนƒ +ไน… +ไน‹ +ไนŽ +ไน˜ +ไน™ +ไน +ไนŸ +ไนณ +ไนพ +ไบ‚ +ไบ† +ไบ‹ +ไบŒ +ไบŽ +ไบ‘ +ไบ’ +ไบ” +ไบ• +ไบž +ไบก +ไบค +ไบฅ +ไบฆ +ไบจ +ไบซ +ไบฌ +ไบญ +ไบฎ +ไบบ +ไป +ไปŠ +ไป‹ +ไป• +ไป– +ไป˜ +ไป™ +ไปฃ +ไปค +ไปฅ +ไปฐ +ไปฒ +ไปถ +ไปป +ไผ +ไผŠ +ไผ +ไผ +ไผ‘ +ไผš +ไผฏ +ไผผ +ไผฝ +ไฝ† +ไฝ +ไฝŽ +ไฝ +ไฝ +ไฝ• +ไฝ™ +ไฝ› +ไฝœ +ไฝณ +ไฝฟ +ไพ† +ไพ‹ +ไพ +ไพ› +ไพ +ไพฏ +ไพต +ไพฟ +ไฟ‚ +ไฟŠ +ไฟ— +ไฟ +ไฟก +ไฟฎ +ไฟฑ +ๅ€‚ +ๅ€‰ +ๅ€‹ +ๅ€ +ๅ€’ +ๅ€™ +ๅ€Ÿ +ๅ€ค +ๅ€ซ +ๅ€ญ +ๅ‡ +ๅ‰ +ๅ +ๅœ +ๅฅ +ๅ‚… +ๅ‚™ +ๅ‚ณ +ๅ‚ต +ๅ‚ท +ๅƒ‰ +ๅƒ +ๅƒ• +ๅƒž +ๅƒง +ๅƒน +ๅ„€ +ๅ„’ +ๅ„Ÿ +ๅ„ช +ๅ… +ๅ…ƒ +ๅ…„ +ๅ…… +ๅ…† +ๅ…ˆ +ๅ…‰ +ๅ…‹ +ๅ… +ๅ…’ +ๅ…ฅ +ๅ…ง +ๅ…จ +ๅ…ฉ +ๅ…ช +ๅ…ซ +ๅ…ฌ +ๅ…ญ +ๅ…ฑ +ๅ…ต +ๅ…ถ +ๅ…ท +ๅ…ธ +ๅ…ผ +ๅ†… +ๅ†Š +ๅ† +ๅ†  +ๅ†ฌ +ๅ‡‰ +ๅ‡ก +ๅ‡ถ +ๅ‡บ +ๅˆ€ +ๅˆ† +ๅˆ‡ +ๅˆŠ +ๅˆ‘ +ๅˆ— +ๅˆ +ๅˆค +ๅˆฅ +ๅˆฉ +ๅˆฐ +ๅˆถ +ๅˆธ +ๅˆบ +ๅˆป +ๅ‰‡ +ๅ‰ +ๅ‰› +ๅ‰ฏ +ๅ‰ฒ +ๅ‰ต +ๅŠƒ +ๅŠ‡ +ๅŠ‰ +ๅŠ +ๅŠ› +ๅŠŸ +ๅŠ  +ๅŠฉ +ๅ‹‡ +ๅ‹’ +ๅ‹• +ๅ‹™ +ๅ‹ +ๅ‹ž +ๅ‹ข +ๅ‹ค +ๅ‹ณ +ๅ‹ธ +ๅ‹ฟ +ๅŒ… +ๅŒ– +ๅŒ— +ๅŒ  +ๅŒก +ๅ€ +ๅ +ๅƒ +ๅ‡ +ๅˆ +ๅŠ +ๅ‘ +ๅ’ +ๅ“ +ๅ” +ๅ— +ๅš +ๅœ +ๅž +ๅ  +ๅฆ +ๅฏ +ๅฐ +ๅฑ +ๅด +ๅท +ๅฝ +ๅฟ +ๅŽš +ๅŽŸ +ๅŽป +ๅƒ +ๅˆ +ๅŠ +ๅ‹ +ๅ +ๅ” +ๅ– +ๅ— +ๅข +ๅฃ +ๅค +ๅฅ +ๅช +ๅฌ +ๅฏ +ๅฐ +ๅฒ +ๅณ +ๅธ +ๅ„ +ๅˆ +ๅ‰ +ๅŒ +ๅ +ๅŽ +ๅ +ๅ‘ +ๅ› +ๅฆ +ๅซ +ๅณ +ๅพ +ๅ‘‚ +ๅ‘Š +ๅ‘จ +ๅ‘ณ +ๅ‘ผ +ๅ‘ฝ +ๅ’Œ +ๅ’ธ +ๅ“€ +ๅ“ +ๅ“‰ +ๅ“ก +ๅ“ฒ +ๅ” +ๅ”ฏ +ๅ”ฑ +ๅ•† +ๅ• +ๅ•“ +ๅ–„ +ๅ–œ +ๅ–ช +ๅ–ฎ +ๅ˜‰ +ๅ˜— +ๅ™จ +ๅšด +ๅ›› +ๅ›ž +ๅ›  +ๅ›บ +ๅ›ฝ +ๅœˆ +ๅœ‹ +ๅœ +ๅœ’ +ๅœ“ +ๅœ– +ๅœ˜ +ๅœŸ +ๅœจ +ๅœญ +ๅœฐ +ๅ€ +ๅ‚ +ๅ‡ +ๅŠ +ๅ +ๅก +ๅค +ๅช +ๅž‹ +ๅŸŽ +ๅŸŸ +ๅŸท +ๅŸบ +ๅ ‚ +ๅ … +ๅ ฏ +ๅ ฑ +ๅ ด 
+ๅก” +ๅกš +ๅกต +ๅขƒ +ๅข“ +ๅขž +ๅขจ +ๅขณ +ๅฃ +ๅฃ‡ +ๅฃ“ +ๅฃค +ๅฃซ +ๅฃฌ +ๅฃฏ +ๅฃฝ +ๅค +ๅค• +ๅค– +ๅคš +ๅคœ +ๅคข +ๅคง +ๅคฉ +ๅคช +ๅคซ +ๅคฎ +ๅคฑ +ๅคท +ๅฅ‡ +ๅฅˆ +ๅฅ‰ +ๅฅŽ +ๅฅ +ๅฅ‘ +ๅฅณ +ๅฅด +ๅฅฝ +ๅฆ‚ +ๅฆƒ +ๅฆ„ +ๅฆ“ +ๅฆ™ +ๅฆป +ๅง‹ +ๅง“ +ๅง” +ๅงœ +ๅงฌ +ๅจ +ๅฉš +ๅฉข +ๅฉฆ +ๅญ +ๅญ” +ๅญ— +ๅญ˜ +ๅญ +ๅญŸ +ๅญฃ +ๅญค +ๅญฆ +ๅญซ +ๅญธ +ๅฎ… +ๅฎ‡ +ๅฎˆ +ๅฎ‰ +ๅฎ‹ +ๅฎŒ +ๅฎ— +ๅฎ˜ +ๅฎ™ +ๅฎš +ๅฎœ +ๅฎข +ๅฎฃ +ๅฎค +ๅฎฎ +ๅฎฐ +ๅฎณ +ๅฎด +ๅฎถ +ๅฎน +ๅฎฟ +ๅฏ‚ +ๅฏ„ +ๅฏ… +ๅฏ† +ๅฏŒ +ๅฏ’ +ๅฏŸ +ๅฏก +ๅฏฆ +ๅฏง +ๅฏฉ +ๅฏซ +ๅฏฌ +ๅฏถ +ๅฏบ +ๅฐ +ๅฐ„ +ๅฐ‡ +ๅฐˆ +ๅฐ‰ +ๅฐŠ +ๅฐ‹ +ๅฐ +ๅฐŽ +ๅฐ +ๅฐ‘ +ๅฐ™ +ๅฐฑ +ๅฐน +ๅฐบ +ๅฐผ +ๅฐพ +ๅฑ€ +ๅฑ… +ๅฑˆ +ๅฑ‹ +ๅฑ• +ๅฑค +ๅฑฅ +ๅฑฌ +ๅฑฑ +ๅฒ +ๅฒก +ๅฒฉ +ๅฒณ +ๅฒธ +ๅณฏ +ๅณฐ +ๅณด +ๅณถ +ๅด‡ +ๅดŽ +ๅด” +ๅถบ +ๅท– +ๅท +ๅทž +ๅทก +ๅทฅ +ๅทฆ +ๅทจ +ๅทซ +ๅทฎ +ๅทฑ +ๅทฒ +ๅทณ +ๅทด +ๅธ‚ +ๅธƒ +ๅธŒ +ๅธ– +ๅธ +ๅธฅ +ๅธซ +ๅธญ +ๅธถ +ๅธธ +ๅน• +ๅนฒ +ๅนณ +ๅนด +ๅนธ +ๅนน +ๅนป +ๅนผ +ๅนฝ +ๅนพ +ๅบŠ +ๅบ +ๅบ• +ๅบ— +ๅบš +ๅบœ +ๅบฆ +ๅบง +ๅบซ +ๅบญ +ๅบต +ๅบถ +ๅบท +ๅบธ +ๅป‰ +ๅปŸ +ๅปข +ๅปฃ +ๅปณ +ๅปถ +ๅปท +ๅปบ +ๅผ +ๅผ“ +ๅผ” +ๅผ• +ๅผ— +ๅผ˜ +ๅผŸ +ๅผฑ +ๅผต +ๅผบ +ๅผผ +ๅฝˆ +ๅฝŒ +ๅฝข +ๅฝฆ +ๅฝฉ +ๅฝญ +ๅฝฑ +ๅฝน +ๅฝผ +ๅพ€ +ๅพ +ๅพ… +ๅพ‹ +ๅพŒ +ๅพ +ๅพ’ +ๅพ— +ๅพž +ๅพก +ๅพฉ +ๅพฎ +ๅพต +ๅพท +ๅพฝ +ๅฟƒ +ๅฟ… +ๅฟŒ +ๅฟ +ๅฟ— +ๅฟ˜ +ๅฟ  +ๅฟต +ๅฟฝ +ๆ€’ +ๆ€ +ๆ€ฅ +ๆ€ง +ๆ€จ +ๆ€ช +ๆ +ๆ’ +ๆจ +ๆฉ +ๆญ +ๆฏ +ๆ‚… +ๆ‚Ÿ +ๆ‚ฃ +ๆ‚ฒ +ๆƒ… +ๆƒ‘ +ๆƒŸ +ๆƒ  +ๆƒก +ๆƒฑ +ๆƒณ +ๆ„ +ๆ„š +ๆ„› +ๆ„Ÿ +ๆ„ผ +ๆ…ˆ +ๆ…‹ +ๆ…• +ๆ…ง +ๆ…ฎ +ๆ…ถ +ๆ…พ +ๆ†‚ +ๆ†ฒ +ๆ‡‰ +ๆ‡ท +ๆ‡ฟ +ๆˆŠ +ๆˆŒ +ๆˆ +ๆˆ‘ +ๆˆ’ +ๆˆ– +ๆˆฆ +ๆˆฐ +ๆˆฑ +ๆˆด +ๆˆถ +ๆˆฟ +ๆ‰€ +ๆ‰‹ +ๆ‰ +ๆ‰“ +ๆ‰ถ +ๆ‰น +ๆ‰ฟ +ๆŠ€ +ๆŠ• +ๆŠ— +ๆŠ˜ +ๆ‹› +ๆ‹œ +ๆŒ +ๆŒ‡ +ๆŒฏ +ๆŽˆ +ๆŽŒ +ๆŽ’ +ๆŽข +ๆŽฅ +ๆŽจ +ๆ +ๆš +ๆ› +ๆด +ๆ +ๆ‘  +ๆ‘ฉ +ๆ’ซ +ๆ’ญ +ๆ’ฐ +ๆ“‡ +ๆ“Š +ๆ“ +ๆ“” +ๆ“š +ๆ“ง +ๆ“ฌ +ๆ” +ๆ”ฏ +ๆ”ถ +ๆ”น +ๆ”ป +ๆ”พ +ๆ”ฟ +ๆ•… +ๆ•ˆ +ๆ• +ๆ•Ž +ๆ• +ๆ•‘ +ๆ•— +ๆ•ข +ๆ•ฃ +ๆ•ฆ +ๆ•ฌ +ๆ•ด +ๆ•ต +ๆ•ธ +ๆ–‡ +ๆ–— +ๆ–™ +ๆ–ฅ +ๆ–ฏ +ๆ–ฐ +ๆ–ท +ๆ–น +ๆ–ผ +ๆ–ฝ +ๆ—… +ๆ—Œ +ๆ— +ๆ—— +ๆ—ฃ +ๆ—ฅ +ๆ—จ +ๆ—ฉ +ๆ˜‡ +ๆ˜Œ +ๆ˜Ž +ๆ˜“ +ๆ˜Ÿ +ๆ˜  +ๆ˜ฅ +ๆ˜ญ +ๆ˜ฏ +ๆ™‚ +ๆ™‰ +ๆ™‹ +ๆ™ฉ +ๆ™ฎ +ๆ™ฏ +ๆ™บ +ๆš— +ๆšด +ๆ›† +ๆ›‰ +ๆ›ฐ +ๆ›ฒ +ๆ›ด +ๆ›ธ +ๆ›น +ๆ›บ +ๆ›พ +ๆœ€ +ๆœƒ +ๆœˆ +ๆœ‰ +ๆœ‹ +ๆœ +ๆœ” +ๆœ› +ๆœ +ๆœŸ +ๆœจ +ๆœช +ๆœซ +ๆœฌ +ๆœฑ +ๆœด +ๆŽ +ๆ +ๆ‘ +ๆ– +ๆœ +ๆŸ +ๆฑ +ๆพ +ๆฟ +ๆž— +ๆžœ +ๆž +ๆŸ +ๆŸ“ +ๆŸ” +ๆŸฑ +ๆŸณ +ๆŸป +ๆ ก +ๆ ช +ๆ ธ +ๆ น +ๆ ผ +ๆก‚ +ๆกƒ +ๆกˆ +ๆก‘ +ๆก“ +ๆข +ๆข… +ๆข +ๆขต +ๆฃ„ +ๆฃฎ +ๆค +ๆฅŠ +ๆฅš +ๆฅญ +ๆฅต +ๆฆฎ +ๆง‹ +ๆงช +ๆจ‚ +ๆจ“ +ๆจ™ +ๆจž +ๆจก +ๆจฃ +ๆจน +ๆฉ‹ +ๆฉŸ +ๆฉซ +ๆช€ +ๆชข +ๆฌŠ +ๆฌก +ๆฌฒ +ๆฌฝ +ๆฌพ +ๆญŒ +ๆญ +ๆญข +ๆญฃ +ๆญค +ๆญฅ +ๆญฆ +ๆญฒ +ๆญท +ๆญธ +ๆญป +ๆฎŠ +ๆฎ˜ +ๆฎต +ๆฎท +ๆฎบ +ๆฎฟ +ๆฏ… +ๆฏ +ๆฏ +ๆฏ’ +ๆฏ” +ๆฏ› +ๆฐ +ๆฐ‘ +ๆฐฃ +ๆฐด +ๆฐท +ๆฐธ +ๆฑ‚ +ๆฑ— +ๆฑ +ๆฑŸ +ๆฑ  +ๆฑบ +ๆฒˆ +ๆฒ’ +ๆฒ™ +ๆฒณ +ๆฒน +ๆฒป +ๆณ +ๆณ‰ +ๆณ• +ๆณข +ๆณจ +ๆณฐ +ๆณณ +ๆด‹ +ๆด— +ๆด™ +ๆด› +ๆดž +ๆดฅ +ๆดช +ๆดฒ +ๆดป +ๆดพ +ๆต +ๆต™ +ๆตฆ +ๆตฉ +ๆตช +ๆตฎ +ๆตท +ๆถˆ +ๆถ‰ +ๆท‘ +ๆทก +ๆทจ +ๆทซ +ๆทฎ +ๆทฑ +ๆทณ +ๆทต +ๆทท +ๆทธ +ๆธ… +ๆธ› +ๆธก +ๆธฌ +ๆธฏ +ๆน– +ๆนฏ +ๆบ +ๆบ– +ๆบช +ๆบซ +ๆป… +ๆปฟ +ๆผ +ๆผ +ๆผ” +ๆผข +ๆผธ +ๆฝ› +ๆฝค +ๆฝญ +ๆฝฎ +ๆพค +ๆฟŸ +ๆฟค +็ฃ +็ซ +็ฝ +็‚Ž +็‚ณ +็ƒˆ +็ƒ +็„‰ +็„ก +็„ถ +็…™ +็…ฅ +็…ง +็…ฉ +็†Š +็†™ +็†Ÿ +็†ฑ +็‡ˆ +็‡• +็‡Ÿ +็‡ฎ +็ˆญ +็ˆฒ +็ˆต +็ˆถ +็ˆพ +็‰‡ +็‰ˆ +็‰™ +็‰› +็‰ง +็‰ฉ +็‰น +็Šฌ +็Šฏ +็‹€ +็‹‚ +็‹— +็Œถ +็„ +็จ +็ธ +็ป +็Ž„ +็Ž‡ +็Ž‰ +็Ž‹ +็ +็  +็ญ +็พ +็ƒ +็† +็ด +็‘ž +็’ฐ +็”˜ +็”š +็”Ÿ +็”ฃ +็”จ +็”ซ +็”ฐ +็”ฑ +็”ฒ +็”ณ +็”ท +็”บ +็•Œ +็•™ +็•ฅ +็•ช +็•ฐ +็•ต +็•ถ +็•ฟ +็– +็–‘ +็–พ +็—… +็—‡ +็—› +็™ธ +็™ป +็™ผ +็™ฝ +็™พ +็š„ +็š† +็š‡ +็šฎ +็›Š +็›› +็›œ +็›Ÿ +็›ก +็›ฃ +็›ค +็›ง +็›ฎ +็›ด +็›ธ +็œ +็œ‹ +็œž +็œ  +็œผ +็€ +็ฃ +็Ÿฃ +็Ÿฅ +็Ÿญ +็Ÿณ +็ ‚ +็ ฒ +็ ด +็ก +็ข‘ +็ขง +็ขฉ +็ขบ +็ฃ +็ฃจ +็คบ +็คพ +็ฅ€ +็ฅ +็ฅ– +็ฅ +็ฅž +็ฅ  +็ฅฅ +็ฅจ +็ฅญ +็ฅฟ +็ฆ +็ฆ +็ฆ 
+็ฆช +็ฆฎ +็ฆน +็ง€ +็ง +็ง‰ +็ง‹ +็ง‘ +็ง˜ +็งŸ +็งฆ +็งฉ +็งป +็จ… +็จ‹ +็จฎ +็จฑ +็จท +็จฟ +็ฉ€ +็ฉ† +็ฉ +็ฉด +็ฉถ +็ฉบ +็ช +็ช“ +็ชฎ +็ซ‹ +็ซ  +็ซฅ +็ซฏ +็ซถ +็ซน +็ฌ‘ +็ฌฆ +็ฌฌ +็ญ† +็ญ‰ +็ญ” +็ญ– +็ฎ• +็ฎ— +็ฎก +็ฏ€ +็ฏ„ +็ฏ‡ +็ฏ‰ +็ฐก +็ฐฟ +็ฑ +็ฑณ +็ฒพ +็ณป +็ด€ +็ด„ +็ด… +็ด‹ +็ด +็ด” +็ด™ +็ดš +็ด  +็ดซ +็ดฐ +็ดน +็ต‚ +็ต„ +็ต +็ตฆ +็ตฑ +็ตฒ +็ตถ +็ถ“ +็ถ  +็ถญ +็ถฑ +็ถฒ +็ทš +็ทฃ +็ทจ +็ธฃ +็ธฝ +็น” +็นผ +็บŒ +็ฝช +็ฝฎ +็ฝฐ +็ฝฒ +็พ… +็พŠ +็พŽ +็พค +็พฉ +็พฝ +็ฟ +็ฟŠ +็ฟ’ +็ฟฐ +็ฟผ +่€ +่€ƒ +่€… +่€Œ +่€• +่€ณ +่€ถ +่– +่ž +่ฏ +่ฒ +่ท +่ฝ +่‚… +่‚‰ +่‚ฒ +่ƒŒ +่ƒŽ +่ƒก +่ƒฝ +่„ˆ +่„ซ +่…ฆ +่…ธ +่…น +่‡ฃ +่‡จ +่‡ช +่‡ณ +่‡ด +่‡บ +่ˆ‡ +่ˆˆ +่ˆŠ +่ˆŒ +่ˆ +่ˆœ +่ˆž +่ˆŸ +่ˆช +่ˆฌ +่ˆน +่‰ฏ +่‰ฒ +่Š +่Šฑ +่Šณ +่‹‘ +่‹ฅ +่‹ฆ +่‹ฑ +่Œ‚ +่Œƒ +่Œถ +่‰ +่’ +่ท +่ŽŠ +่Žซ +่Š +่œ +่ฉ +่ฏ +่ด +่ฌ +่ฝ +่‘‰ +่‘— +่‘› +่‘ฃ +่‘ฌ +่’™ +่’ผ +่“‹ +่“ฎ +่”ก +่•ญ +่–„ +่–› +่–ฉ +่— +่— +่—ค +่—ฅ +่—ฉ +่˜‡ +่˜ญ +่™Ž +่™• +่™› +่™ž +่™Ÿ +่›‡ +่œ€ +่ž +่ก€ +่ก† +่กŒ +่ก +่ก“ +่ก— +่ก› +่กก +่กฃ +่กจ +่กฐ +่ข +่ขซ +่ฃ +่ฃ• +่ฃœ +่ฃ +่ฃต +่ฃฝ +่ค‡ +่ฅ„ +่ฅฟ +่ฆ +่ฆ† +่ฆ‡ +่ฆ‹ +่ฆ +่ฆ– +่ฆช +่ฆบ +่ฆฝ +่ง€ +่ง’ +่งฃ +่งธ +่จ€ +่จˆ +่จŽ +่จ“ +่จ— +่จ˜ +่จŸ +่จฅ +่จช +่จญ +่จฑ +่จด +่จป +่ฉ• +่ฉž +่ฉฆ +่ฉฉ +่ฉฑ +่ชŒ +่ช +่ชž +่ช  +่ชค +่ชช +่ชฒ +่ชฟ +่ซ‡ +่ซ‹ +่ซ– +่ซฆ +่ซซ +่ซธ +่ฌ€ +่ฌ‚ +่ฌ™ +่ฌ› +่ฌ +่ญ‰ +่ญ˜ +่ญœ +่ญฆ +่ญฏ +่ญฐ +่ญท +่ฎ€ +่ฎŠ +่ฎ“ +่ฐท +่ฑ† +่ฑˆ +่ฑŠ +่ฑก +่ฑช +่ฑซ +่ฒž +่ฒ  +่ฒก +่ฒข +่ฒง +่ฒจ +่ฒช +่ฒซ +่ฒฌ +่ฒด +่ฒท +่ฒธ +่ฒป +่ฒฟ +่ณ€ +่ณƒ +่ณ‡ +่ณˆ +่ณŠ +่ณ“ +่ณœ +่ณž +่ณ  +่ณข +่ณฃ +่ณค +่ณฆ +่ณช +่ดˆ +่ดŠ +่ตค +่ตฐ +่ตท +่ถ… +่ถŠ +่ถ™ +่ถณ +่ทฏ +่นŸ +่บซ +่ปŠ +่ปŒ +่ป +่ป’ +่ผ‰ +่ผ” +่ผ• +่ผ +่ผช +่ผฏ +่ผธ +่ผฟ +่ฝ‰ +่พ› +่พจ +่พญ +่พฏ +่พฐ +่พฒ +่ฟŽ +่ฟ‘ +่ฟฐ +่ฟท +่ฟฝ +้€€ +้€ +้€† +้€š +้€Ÿ +้€  +้€ฃ +้€ฒ +้€ธ +้‚ +้‡ +้Š +้‹ +้Ž +้“ +้” +้• +้  +้ฉ +้ท +้ธ +้บ +้ผ +้ฟ +้‚„ +้‚Š +้‚‘ +้‚ฃ +้‚ฆ +้‚ช +้‚ฑ +้ƒŽ +้ƒž +้ƒก +้ƒจ +้ƒญ +้ƒฝ +้„• +้„ง +้„ญ +้…‰ +้… +้…’ +้…ธ +้†ซ +้‡‹ +้‡Œ +้‡ +้‡Ž +้‡ +้‡‘ +้‡œ +้‡ +้‰‰ +้Š€ +้Š… +้Š˜ +้Œ„ +้Œข +้Œฆ +้Œซ +้Œฏ +้พ +้Žฌ +้Žญ +้ก +้˜ +้ต +้‘‘ +้•ท +้–€ +้–‹ +้–‘ +้–“ +้–” +้–ฃ +้—œ +้˜ช +้˜ฒ +้˜ฟ +้™€ +้™„ +้™ +้™ +้™ข +้™ฃ +้™ค +้™ฐ +้™ณ +้™ต +้™ถ +้™ธ +้™ฝ +้š† +้šŠ +้š‹ +้šŽ +้š› +้šœ +้šจ +้šช +้šฑ +้›„ +้›… +้›† +้›– +้›™ +้›œ +้›ข +้›ฃ +้›จ +้›ช +้›ฒ +้›ป +้œ€ +้œ‡ +้œฒ +้ˆ +้‘ +้– +้œ +้ž +้ข +้ฉ +้Ÿ“ +้Ÿณ +้Ÿป +้ ‚ +้ … +้ † +้ ˆ +้ Œ +้ ˜ +้ ญ +้กŒ +้ก +้ก” +้ก˜ +้กž +้กง +้กฏ +้ขจ +้ฃ› +้ฃŸ +้ฃฎ +้ฃฏ +้คŠ +้ค˜ +้คจ +้ฆ– +้ฆ™ +้ฆฌ +้ง +้จŽ +้ฉ— +้ฉ› +้ชจ +้ซ” +้ซ˜ +้ซฎ +้ฌช +้ฌผ +้ญ‚ +้ญ +้ญ” +้ญš +้ญฏ +้ฎฎ +้ณฅ +้ณณ +้ณด +้ดป +้ถด +้ท„ +้นฝ +้นฟ +้บ— +้บŸ +้บป +้ปƒ +้ป‘ +้ป˜ +้ปž +้ปจ +้ผŽ +้ผ“ +้ฝŠ +้ฝ‹ +้ฝ’ +้ฝก +้พ +้พœ +๊ฐ€ +๊ฐ +๊ฐ„ +๊ฐ‡ +๊ฐˆ +๊ฐ‰ +๊ฐ +๊ฐ‘ +๊ฐ’ +๊ฐ“ +๊ฐ” +๊ฐ• +๊ฐ– +๊ฐ— +๊ฐ™ +๊ฐš +๊ฐ› +๊ฐœ +๊ฐ +๊ฐ  +๊ฐค +๊ฐฌ +๊ฐญ +๊ฐฏ +๊ฐฐ +๊ฐฑ +๊ฐธ +๊ฑ€ +๊ฑ +๊ฑ” +๊ฑฐ +๊ฑฑ +๊ฑด +๊ฑท +๊ฑธ +๊ฒ€ +๊ฒ +๊ฒƒ +๊ฒ„ +๊ฒ… +๊ฒ‰ +๊ฒŠ +๊ฒ‹ +๊ฒŒ +๊ฒ +๊ฒ” +๊ฒœ +๊ฒŸ +๊ฒ  +๊ฒก +๊ฒจ +๊ฒฉ +๊ฒช +๊ฒฌ +๊ฒฐ +๊ฒธ +๊ฒน +๊ฒป +๊ฒผ +๊ฒฝ +๊ณ +๊ณ„ +๊ณ— +๊ณ  +๊ณก +๊ณค +๊ณง +๊ณจ +๊ณช +๊ณฏ +๊ณฐ +๊ณฑ +๊ณณ +๊ณต +๊ณถ +๊ณธ +๊ณผ +๊ณฝ +๊ด€ +๊ด„ +๊ดŒ +๊ด‘ +๊ด˜ +๊ดœ +๊ดญ +๊ดด +๊ต‰ +๊ต +๊ตฃ +๊ตฌ +๊ตญ +๊ตฐ +๊ตณ +๊ตด +๊ตต +๊ตถ +๊ตผ +๊ตฝ +๊ตฟ +๊ถ +๊ถ‚ +๊ถˆ +๊ถŒ +๊ถ +๊ถœ +๊ถค +๊ท€ +๊ท„ +๊ทˆ +๊ท +๊ท“ +๊ทœ +๊ท  +๊ทค +๊ทธ +๊ทน +๊ทผ +๊ธ€ +๊ธ +๊ธˆ +๊ธ‰ +๊ธ‹ +๊ธ +๊ธ” +๊ธฐ +๊ธฑ +๊ธด +๊ธธ +๊ธฟ +๊น€ +๊น +๊นƒ +๊น„ +๊น… +๊นŠ +๊นŒ +๊น +๊นŽ +๊น +๊น” +๊นœ +๊น +๊นŸ +๊นก +๊นฅ +๊นจ +๊นฌ +๊นฐ +๊นป +๊นผ +๊นฝ +๊บ„ +๊บ… +๊บผ +๊บฝ +๊บพ +๊ป€ +๊ป„ +๊ปŒ +๊ป +๊ป +๊ป +๊ป‘ +๊ป˜ +๊ปด +๊ผˆ +๊ผฌ +๊ผญ +๊ผฐ +๊ผด 
+๊ผผ +๊ผฝ +๊ผฟ +๊ฝ +๊ฝ‚ +๊ฝƒ +๊ฝˆ +๊ฝ‰ +๊ฝ +๊ฝค +๊ฝฅ +๊ฝน +๊พ€ +๊พธ +๊พน +๊พผ +๊ฟ€ +๊ฟ‡ +๊ฟˆ +๊ฟ‰ +๊ฟ‹ +๊ฟ +๊ฟŽ +๊ฟ” +๊ฟจ +๊ฟฉ +๊ฟฐ +๋€Œ +๋€ +๋€” +๋€œ +๋€จ +๋„ +๋ˆ +๋Š +๋Œ +๋“ +๋” +๋— +๋™ +๋ +๋ผ +๋ฝ +๋‚€ +๋‚„ +๋‚Œ +๋‚ +๋‚‘ +๋‚˜ +๋‚™ +๋‚š +๋‚œ +๋‚Ÿ +๋‚  +๋‚ก +๋‚จ +๋‚ฉ +๋‚ซ +๋‚ฌ +๋‚ญ +๋‚ฎ +๋‚ฏ +๋‚ฑ +๋‚ณ +๋‚ด +๋‚ต +๋‚ธ +๋‚ผ +๋ƒ„ +๋ƒ… +๋ƒ‡ +๋ƒˆ +๋ƒ‰ +๋ƒ +๋ƒ‘ +๋ƒ” +๋ƒ˜ +๋ƒ  +๋ƒฅ +๋„ˆ +๋„‰ +๋„‹ +๋„Œ +๋„ +๋„“ +๋„˜ +๋„™ +๋„› +๋„œ +๋„ +๋„ฃ +๋„ค +๋„ฅ +๋„จ +๋„ฌ +๋„ด +๋„ต +๋„ท +๋„ธ +๋„น +๋„ผ +๋…€ +๋… +๋…„ +๋… +๋…” +๋…• +๋…˜ +๋…œ +๋…ธ +๋…น +๋…ผ +๋†€ +๋†ˆ +๋†‰ +๋†‹ +๋† +๋†’ +๋†“ +๋†” +๋†จ +๋‡Œ +๋‡จ +๋‡ฐ +๋‡ธ +๋‡ฝ +๋ˆ„ +๋ˆ… +๋ˆˆ +๋ˆŒ +๋ˆ” +๋ˆ• +๋ˆ— +๋ˆ™ +๋ˆ  +๋ˆด +๋‰˜ +๋‰œ +๋‰  +๋‰ด +๋Š„ +๋Š‰ +๋Š +๋Š‘ +๋Š” +๋Š˜ +๋Š™ +๋Š  +๋Šฅ +๋Šฆ +๋Šช +๋Šฌ +๋‹ˆ +๋‹‰ +๋‹Œ +๋‹ +๋‹˜ +๋‹™ +๋‹› +๋‹ +๋‹ข +๋‹ค +๋‹ฅ +๋‹ฆ +๋‹จ +๋‹ซ +๋‹ฌ +๋‹ญ +๋‹ฎ +๋‹ณ +๋‹ด +๋‹ต +๋‹ท +๋‹น +๋‹ป +๋‹ผ +๋‹ฟ +๋Œ€ +๋Œ +๋Œ„ +๋Œˆ +๋Œ +๋Œ‘ +๋Œ“ +๋Œ” +๋Œ• +๋Œœ +๋” +๋• +๋˜ +๋œ +๋Ÿ +๋ค +๋ฅ +๋ง +๋ฉ +๋ซ +๋ฎ +๋ฐ +๋ฑ +๋ด +๋ธ +๋Ž€ +๋Ž +๋Žƒ +๋Ž… +๋Žˆ +๋ŽŒ +๋Ž +๋Ž  +๋„ +๋… +๋ˆ +๋‹ +๋Œ +๋” +๋• +๋— +๋™ +๋› +๋ผ +๋ +๋ +๋˜ +๋œ +๋  +๋จ +๋ฉ +๋ฌ +๋ด +๋‘ +๋‘‘ +๋‘” +๋‘˜ +๋‘  +๋‘ก +๋‘ฃ +๋‘ฅ +๋‘ฌ +๋’€ +๋’ˆ +๋’ค +๋’ท +๋’น +๋“€ +๋“ˆ +๋“ +๋“œ +๋“ +๋“  +๋“ฃ +๋“ค +๋“ฆ +๋“ฌ +๋“ญ +๋“ฏ +๋“ฑ +๋”” +๋”• +๋”˜ +๋”› +๋”œ +๋”ค +๋”ฅ +๋”ง +๋”จ +๋”ฉ +๋”ช +๋”ฐ +๋”ฑ +๋”ด +๋”ธ +๋•€ +๋•ƒ +๋•„ +๋•… +๋•‹ +๋•Œ +๋• +๋•” +๋•œ +๋•  +๋•ก +๋–„ +๋–  +๋–ก +๋–ค +๋–จ +๋–ซ +๋–ณ +๋–ด +๋–ต +๋–ป +๋–ผ +๋—€ +๋—„ +๋— +๋— +๋˜ +๋˜‘ +๋˜˜ +๋˜ฅ +๋˜ฌ +๋™‡ +๋™ค +๋šœ +๋š +๋š  +๋šค +๋šซ +๋šฑ +๋›ฐ +๋›ด +๋›ธ +๋œ€ +๋œจ +๋œฉ +๋œฌ +๋œฏ +๋œฐ +๋œธ +๋œป +๋„ +๋ˆ +๋Œ +๋” +๋  +๋ค +๋จ +๋ต +๋ผ +๋ฝ +๋ž€ +๋ž„ +๋žŒ +๋ž +๋ž +๋ž +๋ž‘ +๋ž– +๋ž— +๋ž˜ +๋ž™ +๋žœ +๋ž  +๋žจ +๋žฉ +๋žซ +๋žฌ +๋žญ +๋žด +๋žต +๋Ÿ‰ +๋Ÿฌ +๋Ÿญ +๋Ÿฐ +๋Ÿด +๋Ÿผ +๋Ÿฝ +๋Ÿฟ +๋ € +๋  +๋ ‡ +๋ ˆ +๋ ‰ +๋ Œ +๋  +๋ ˜ +๋ ™ +๋ › +๋  +๋ ค +๋ ฅ +๋ จ +๋ ฌ +๋ ด +๋ ต +๋ ท +๋ ธ +๋ น +๋ก€ +๋ก„ +๋กœ +๋ก +๋ก  +๋กค +๋กฌ +๋กญ +๋กฏ +๋กฑ +๋ขฐ +๋ขด +๋ฃ€ +๋ฃŒ +๋ฃก +๋ฃจ +๋ฃฉ +๋ฃฌ +๋ฃฐ +๋ฃธ +๋ฃน +๋ฃป +๋ฃฝ +๋ค„ +๋ค˜ +๋คผ +๋คฝ +๋ฅ˜ +๋ฅ™ +๋ฅœ +๋ฅ  +๋ฅจ +๋ฅญ +๋ฅด +๋ฅต +๋ฅธ +๋ฅผ +๋ฆ„ +๋ฆ… +๋ฆ‡ +๋ฆ‰ +๋ฆŽ +๋ฆ +๋ฆฌ +๋ฆญ +๋ฆฐ +๋ฆด +๋ฆผ +๋ฆฝ +๋ฆฟ +๋ง +๋งˆ +๋ง‰ +๋งŒ +๋งŽ +๋ง +๋ง +๋ง‘ +๋ง˜ +๋ง™ +๋ง› +๋ง +๋งž +๋งก +๋งฃ +๋งค +๋งฅ +๋งจ +๋งฌ +๋งด +๋งต +๋งท +๋งธ +๋งน +๋งบ +๋จ€ +๋จ +๋จธ +๋จน +๋จผ +๋ฉ€ +๋ฉˆ +๋ฉ‹ +๋ฉ +๋ฉŽ +๋ฉ“ +๋ฉ” +๋ฉ• +๋ฉ˜ +๋ฉœ +๋ฉค +๋ฉง +๋ฉฉ +๋ฉฐ +๋ฉฑ +๋ฉด +๋ฉธ +๋ช„ +๋ช… +๋ช‡ +๋ชจ +๋ชฉ +๋ชซ +๋ชฌ +๋ชฐ +๋ชธ +๋ชน +๋ชป +๋ชฝ +๋ซผ +๋ฌ˜ +๋ฌด +๋ฌต +๋ฌถ +๋ฌธ +๋ฌป +๋ฌผ +๋ฌฝ +๋ญ„ +๋ญ‡ +๋ญ‰ +๋ญ +๋ญ +๋ญ” +๋ญ˜ +๋ญก +๋ญฃ +๋ฎˆ +๋ฎŒ +๋ฎ +๋ฎค +๋ฎจ +๋ฎฌ +๋ฎด +๋ฏ€ +๋ฏ„ +๋ฏˆ +๋ฏ +๋ฏธ +๋ฏน +๋ฏผ +๋ฏฟ +๋ฐ€ +๋ฐˆ +๋ฐ‰ +๋ฐ‹ +๋ฐŒ +๋ฐ +๋ฐ +๋ฐ‘ +๋ฐ” +๋ฐ• +๋ฐ– +๋ฐ˜ +๋ฐ› +๋ฐœ +๋ฐ +๋ฐŸ +๋ฐค +๋ฐฅ +๋ฐง +๋ฐฉ +๋ฐญ +๋ฐฐ +๋ฐฑ +๋ฐด +๋ฐธ +๋ฑ€ +๋ฑƒ +๋ฑ… +๋ฑ‰ +๋ฑŒ +๋ฒ„ +๋ฒ… +๋ฒˆ +๋ฒŒ +๋ฒ” +๋ฒ• +๋ฒ— +๋ฒ™ +๋ฒš +๋ฒ  +๋ฒก +๋ฒค +๋ฒง +๋ฒจ +๋ฒณ +๋ฒต +๋ฒผ +๋ฒฝ +๋ณ€ +๋ณ„ +๋ณ +๋ณ +๋ณ +๋ณ‘ +๋ณ• +๋ณœ +๋ณด +๋ณต +๋ณถ +๋ณธ +๋ณผ +๋ด„ +๋ด… +๋ด‡ +๋ด‰ +๋ด +๋ดฃ +๋ดค +๋ดฌ +๋ตˆ +๋ตŒ +๋ต +๋ต™ +๋ตค +๋ถ€ +๋ถ +๋ถ„ +๋ถˆ +๋ถ‰ +๋ถ +๋ถ“ +๋ถ• +๋ถ™ +๋ท” +๋ทฐ +๋ทด +๋ทธ +๋ธŒ +๋ธ +๋ธ” +๋น„ +๋น… +๋นˆ +๋นŒ +๋น” +๋น• +๋น— +๋น™ +๋นš +๋น› +๋น  +๋นก +๋นค +๋นจ +๋นฐ +๋นณ +๋นด +๋นต +๋นป +๋นผ +๋นฝ +๋บ€ +๋บ„ +๋บŒ +๋บ +๋บ +๋บ‘ +๋บจ +๋ป +๋ป‘ +๋ป” +๋ป— +๋ป˜ +๋ปฃ +๋ปค +๋ปฅ +๋ปฌ +๋ผˆ +๋ผ‰ +๋ผ˜ +๋ผ› +๋ฝ€ +๋ฝ +๋ฝˆ +๋ฝ +๋ฝ‘ +๋ฝ• +๋พฐ +๋ฟ… +๋ฟŒ +๋ฟ +๋ฟ +๋ฟ” +๋ฟœ +๋ฟก +์˜ +์œ +์  +์จ +์ฉ +์‚ +์‚‘ +์‚˜ +์‚ฅ +์‚ฌ +์‚ญ +์‚ฏ +์‚ฐ +์‚ด +์‚ถ +์‚ผ +์‚ฝ +์‚ฟ +์ƒ€ +์ƒ +์ƒ… +์ƒˆ +์ƒ‰ +์ƒŒ +์ƒ +์ƒ˜ +์ƒ› +์ƒœ +์ƒ +์ƒค +์ƒฅ +์ƒจ +์ƒฌ +์ƒด +์ƒต +์ƒท +์ƒน +์„€ +์„œ +์„ +์„ž +์„  +์„ฃ +์„ค 
+์„ฌ +์„ญ +์„ฏ +์„ฐ +์„ฑ +์„ถ +์„ธ +์„น +์„ผ +์…€ +์…ˆ +์…‰ +์…‹ +์…Œ +์… +์…” +์…˜ +์…œ +์…ค +์…ง +์…จ +์…ฉ +์…ฐ +์…ด +์…ธ +์†Œ +์† +์†Ž +์† +์†” +์†œ +์† +์†Ÿ +์†ก +์†ฅ +์‡„ +์‡  +์‡ค +์‡ณ +์‡ผ +์ˆ€ +์ˆ„ +์ˆ +์ˆ +์ˆ‘ +์ˆ˜ +์ˆ™ +์ˆœ +์ˆŸ +์ˆ  +์ˆจ +์ˆฉ +์ˆซ +์ˆญ +์ˆฏ +์ˆฑ +์ˆฒ +์ˆด +์‰ +์‰‘ +์‰˜ +์‰ฌ +์‰ฐ +์‰ด +์‰ผ +์‰ฝ +์‰ฟ +์Šˆ +์Š‰ +์ŠŒ +์Š +์Š˜ +์Š› +์Š +์Šค +์Šฅ +์Šจ +์Šฌ +์Šญ +์Šด +์Šต +์Šท +์Šน +์‹œ +์‹ +์‹  +์‹ฃ +์‹ค +์‹ซ +์‹ฌ +์‹ญ +์‹ฏ +์‹ฑ +์‹ถ +์‹ธ +์‹น +์‹ผ +์Œ€ +์Œˆ +์Œ‰ +์ŒŒ +์Œ +์Œ“ +์Œ” +์Œ• +์Œค +์Œฉ +์จ +์ฉ +์ฌ +์ฐ +์ธ +์น +์ผ +์ฝ +์Ž„ +์Žˆ +์˜ +์™ +์œ +์Ÿ +์  +์ญ +์ด +์ˆ +์ +์ฌ +์‘ค +์‘ฅ +์‘จ +์‘น +์“ฐ +์“ฑ +์“ด +์“ธ +์”€ +์” +์”Œ +์”จ +์”ฉ +์”ฌ +์”ฐ +์”ธ +์”น +์”ป +์”ฝ +์•„ +์•… +์•ˆ +์•‰ +์•Š +์•Œ +์•Ž +์•“ +์•” +์•• +์•— +์•˜ +์•™ +์•œ +์•ž +์•  +์•ก +์•ค +์•จ +์•ฐ +์•ฑ +์•ณ +์•ด +์•ต +์•ผ +์•ฝ +์–€ +์–„ +์–‡ +์–Œ +์– +์– +์–‘ +์–• +์–— +์–˜ +์–ด +์–ต +์–ธ +์–น +์–ป +์–ผ +์–ฝ +์—„ +์—… +์—† +์—‡ +์—ˆ +์—‰ +์—Š +์—Œ +์—Ž +์— +์—‘ +์—” +์—˜ +์—  +์—ก +์—ฃ +์—ฅ +์—ฌ +์—ญ +์—ฎ +์—ฐ +์—ด +์—ท +์—ผ +์—ฝ +์—พ +์—ฟ +์˜€ +์˜ +์˜… +์˜† +์˜‡ +์˜ˆ +์˜Œ +์˜ +์˜™ +์˜› +์˜ค +์˜ฅ +์˜จ +์˜ฌ +์˜ญ +์˜ฎ +์˜ณ +์˜ด +์˜ต +์˜ท +์˜น +์˜ป +์™€ +์™ +์™„ +์™ˆ +์™‘ +์™“ +์™” +์™• +์™œ +์™  +์™ค +์™ธ +์™ผ +์š” +์š• +์š˜ +์šœ +์šค +์šฅ +์šง +์šฉ +์šฌ +์šฐ +์šฑ +์šด +์šธ +์›€ +์› +์›ƒ +์›… +์›Œ +์› +์› +์›” +์›œ +์›Ÿ +์›  +์›ก +์›จ +์›ฉ +์›ฌ +์›ฐ +์›ธ +์›น +์œ„ +์œ… +์œˆ +์œŒ +์œ” +์œ• +์œ— +์œ™ +์œ  +์œก +์œค +์œจ +์œณ +์œต +์œท +์œผ +์œฝ +์€ +์„ +์Š +์Œ +์ +์‘ +์˜ +์ด +์ต +์ธ +์ผ +์ฝ +์žƒ +์ž„ +์ž… +์ž‡ +์žˆ +์ž‰ +์žŠ +์žŒ +์žŽ +์ž +์ž‘ +์ž” +์ž– +์ž˜ +์ž  +์žก +์žฃ +์žค +์žฅ +์žฆ +์žฌ +์žญ +์žฐ +์žด +์žผ +์žฝ +์žฟ +์Ÿ +์Ÿˆ +์ŸŒ +์Ÿ +์Ÿค +์ € +์  +์ „ +์ ˆ +์ Š +์  +์ ‘ +์ “ +์ • +์ – +์ œ +์  +์   +์ ค +์ ฌ +์ ญ +์ ฏ +์ ธ +์ ผ +์ก‹ +์กŒ +์ก +์ก” +์กฐ +์กฑ +์กด +์กธ +์ข€ +์ข +์ขƒ +์ข… +์ข† +์ข‡ +์ข‹ +์ขŒ +์ข” +์ขก +์ฃ„ +์ฃ— +์ฃ  +์ฃค +์ฃต +์ฃผ +์ฃฝ +์ค€ +์ค„ +์คŒ +์ค +์ค +์ค‘ +์ค˜ +์คซ +์คฌ +์ฅ +์ฅ” +์ฅ˜ +์ฅฌ +์ฅฐ +์ฅด +์ฆ +์ฆˆ +์ฆ‰ +์ฆŒ +์ฆ +์ฆ˜ +์ฆ™ +์ฆ +์ง€ +์ง +์ง„ +์งˆ +์งŠ +์ง +์ง‘ +์ง“ +์ง• +์ง– +์ง™ +์งš +์งœ +์ง +์ง  +์งข +์งค +์งง +์งฌ +์งญ +์งฐ +์งฑ +์งธ +์งน +์จ‹ +์จŒ +์จ +์จ” +์จฉ +์ฉŒ +์ฉ +์ฉ +์ฉ” +์ฉœ +์ฉ +์ฉก +์ฉจ +์ช„ +์ช˜ +์ชผ +์ชฝ +์ซ€ +์ซ„ +์ซŒ +์ซ‘ +์ซ’ +์ซ“ +์ซ˜ +์ซ™ +์ฌ +์ญˆ +์ญ‰ +์ญ +์ญ™ +์ญ +์ญค +์ฎธ +์ฏ” +์ฏค +์ฏง +์ฐŒ +์ฐ +์ฐ +์ฐ” +์ฐœ +์ฐ +์ฐก +์ฐข +์ฐง +์ฐจ +์ฐฉ +์ฐฌ +์ฐฎ +์ฐฐ +์ฐธ +์ฐน +์ฐป +์ฐผ +์ฐฝ +์ฐพ +์ฑ„ +์ฑ… +์ฑˆ +์ฑŒ +์ฑ” +์ฑ• +์ฑ— +์ฑ˜ +์ฑ™ +์ฑ  +์ฑฑ +์ฒ˜ +์ฒ™ +์ฒœ +์ฒ  +์ฒจ +์ฒฉ +์ฒซ +์ฒญ +์ฒด +์ฒต +์ฒธ +์ฒผ +์ณ‡ +์ณ‰ +์ณ +์ณฃ +์ณค +์ดˆ +์ด‰ +์ดŒ +์ด +์ด˜ +์ด› +์ด +์ดค +์ดจ +์ดฌ +์ดต +์ตœ +์ตธ +์ถ” +์ถ• +์ถ˜ +์ถœ +์ถค +์ถฅ +์ถง +์ถฉ +์ถฐ +์ท„ +์ทŒ +์ทจ +์ธ„ +์ธ  +์ธก +์ธจ +์ธฐ +์ธต +์น˜ +์น™ +์นœ +์น  +์นก +์นจ +์นฉ +์นซ +์นญ +์นด +์นต +์นธ +์นผ +์บ„ +์บ… +์บ‡ +์บ‰ +์บ +์บ” +์บ˜ +์บ  +์บก +์บฃ +์บฅ +์บฌ +์ปค +์ปฅ +์ปจ +์ปซ +์ปฌ +์ปด +์ปต +์ปท +์ปธ +์ปน +์ผ€ +์ผ„ +์ผˆ +์ผ +์ผ“ +์ผœ +์ผ  +์ผค +์ผฐ +์ฝ” +์ฝ• +์ฝ˜ +์ฝœ +์ฝค +์ฝฅ +์ฝง +์ฝฉ +์ฝฐ +์ฝฑ +์ฝด +์ฝธ +์พ… +์พŒ +์พจ +์พฐ +์ฟ„ +์ฟ  +์ฟก +์ฟค +์ฟจ +์ฟฐ +์ฟฑ +์ฟต +์ฟผ +ํ€€ +ํ€„ +ํ€˜ +ํ€ด +ํ€ต +ํ€ธ +ํ€ผ +ํ +ํ˜ +ํฌ +ํญ +ํฐ +ํด +ํผ +ํฝ +ํ‚ +ํ‚ค +ํ‚ฅ +ํ‚จ +ํ‚ฌ +ํ‚ด +ํ‚ต +ํ‚ท +ํ‚น +ํƒ€ +ํƒ +ํƒ„ +ํƒˆ +ํƒ‰ +ํƒ +ํƒ‘ +ํƒ“ +ํƒ” +ํƒ• +ํƒœ +ํƒ +ํƒ  +ํƒค +ํƒฌ +ํƒญ +ํƒฏ +ํƒฐ +ํƒฑ +ํ„ฐ +ํ„ฑ +ํ„ด +ํ„ธ +ํ…€ +ํ… +ํ…ƒ +ํ…„ +ํ…… +ํ…Œ +ํ… +ํ… +ํ…” +ํ…œ +ํ… +ํ…Ÿ +ํ…ก +ํ…จ +ํ…ผ +ํ†ˆ +ํ†  +ํ†ก +ํ†ค +ํ†จ +ํ†ฐ +ํ†ฑ +ํ†ณ +ํ†ต +ํ‡ด +ํˆ‡ +ํˆฌ +ํˆญ +ํˆฐ +ํˆด +ํˆผ +ํ‰ +ํ‰ˆ +ํ‰œ +ํ‰ค +ํŠ€ +ํŠ„ +ํŠˆ +ํŠ• +ํŠœ +ํŠ  +ํŠค +ํŠฌ +ํŠธ +ํŠน +ํŠผ 
+ํŠฟ +ํ‹€ +ํ‹ˆ +ํ‹‹ +ํ‹” +ํ‹ฐ +ํ‹ฑ +ํ‹ด +ํ‹ธ +ํŒ€ +ํŒ +ํŒ… +ํŒŒ +ํŒ +ํŒŽ +ํŒ +ํŒ” +ํŒœ +ํŒ +ํŒŸ +ํŒ  +ํŒก +ํŒฅ +ํŒจ +ํŒฉ +ํŒฌ +ํŒฐ +ํŒธ +ํŒน +ํŒป +ํŒฝ +ํผ +ํฝ +ํŽ€ +ํŽ„ +ํŽŒ +ํŽ +ํŽ +ํŽ‘ +ํŽ˜ +ํŽ™ +ํŽœ +ํŽ  +ํŽจ +ํŽฉ +ํŽซ +ํŽญ +ํŽด +ํŽธ +ํŽผ +ํ„ +ํˆ +ํ‰ +ํ +ํฌ +ํญ +ํฐ +ํด +ํผ +ํฟ +ํ +ํ‘€ +ํ‘œ +ํ‘ฏ +ํ‘ธ +ํ‘น +ํ‘ผ +ํ’€ +ํ’ˆ +ํ’‹ +ํ’ +ํ“Œ +ํ“จ +ํ“ฐ +ํ“ธ +ํ”„ +ํ”ˆ +ํ”Œ +ํ”” +ํ”• +ํ”ผ +ํ”ฝ +ํ•€ +ํ•„ +ํ•Œ +ํ• +ํ• +ํ•‘ +ํ•˜ +ํ•™ +ํ•œ +ํ•  +ํ•ฅ +ํ•จ +ํ•ฉ +ํ•ซ +ํ•ญ +ํ•ณ +ํ•ด +ํ•ต +ํ•ธ +ํ•ผ +ํ–„ +ํ–… +ํ–‡ +ํ–ˆ +ํ–‰ +ํ– +ํ–ฅ +ํ—ˆ +ํ—‰ +ํ—Œ +ํ— +ํ—˜ +ํ—™ +ํ—› +ํ— +ํ—ค +ํ—ฅ +ํ—จ +ํ—ฌ +ํ—ด +ํ—ต +ํ—ท +ํ—น +ํ—ฟ +ํ˜€ +ํ˜ +ํ˜„ +ํ˜ˆ +ํ˜ +ํ˜‘ +ํ˜“ +ํ˜” +ํ˜• +ํ˜œ +ํ˜ธ +ํ˜น +ํ˜ผ +ํ™€ +ํ™ˆ +ํ™‰ +ํ™‹ +ํ™ +ํ™‘ +ํ™” +ํ™• +ํ™˜ +ํ™œ +ํ™ง +ํ™ฉ +ํ™ฐ +ํ™ฑ +ํšƒ +ํšŒ +ํš +ํšŸ +ํšก +ํšจ +ํ›„ +ํ›… +ํ›ˆ +ํ›Œ +ํ›‘ +ํ›” +ํ›— +ํ›™ +ํ›  +ํ›ค +ํ›จ +ํ›ผ +ํœ€ +ํœ‘ +ํœ˜ +ํœ™ +ํœœ +ํœ  +ํœฉ +ํœด +ํœผ +ํ„ +ํ‰ +ํ +ํ‘ +ํ” +ํ˜ +ํ™ +ํ  +ํก +ํฅ +ํฉ +ํฌ +ํฐ +ํžˆ +ํž‰ +ํžŒ +ํž +ํž˜ +ํž™ +ํž› +ํž +๏คŠ +๏ค +๏ค” +๏ค˜ +๏ค› +๏คฏ +๏คณ +๏คด +๏คน +๏ฅ +๏ฅœ +๏ฅฆ +๏ฅง +๏ฅฒ +๏ฅธ +๏ฅบ +๏ฅผ +๏ฆ€ +๏ฆ +๏ฆŒ +๏ฆŽ +๏ฆ— +๏ฆš +๏ฆœ +๏ฆŸ +๏ฆณ +๏ฆถ +๏งƒ +๏ง„ +๏ง‡ +๏ง‰ +๏งŠ +๏ง +๏ง +๏ง‘ +๏ง“ +๏ง˜ +๏ง +๏งก +๏งค +๏งช +๏งด +๏งถ +๏งท +๏งบ +๏งพ +๏จ„ +๏ธ +๏ผ +๏ผ‚ +๏ผ„ +๏ผ… +๏ผ† +๏ผ‡ +๏ผˆ +๏ผ‰ +๏ผŠ +๏ผ‹ +๏ผŒ +๏ผ +๏ผŽ +๏ผ +๏ผš +๏ผ› +๏ผœ +๏ผ +๏ผž +๏ผŸ +๏ผป +๏ผฝ +๏ผฟ +๏ฝ‡ +๏ฝ +๏ฝ” +๏ฝœ +๏ฝž +๏ฝข +๏ฝฃ +๏ฝฅ +๏ฟฃ +๏ฟฆ +๐ŸŒŸ +๐ŸŒธ +๐ŸŒผ +๐ŸŒฟ +๐Ÿƒ +๐Ÿ“ +๐Ÿž +๐Ÿซ +๐Ÿณ +๐Ÿด +๐ŸŽ +๐ŸŽ‰ +๐ŸŽถ +๐Ÿƒ +๐Ÿป +๐Ÿผ +๐Ÿถ +๐Ÿท +๐Ÿฝ +๐Ÿ‘€ +๐Ÿ‘ˆ +๐Ÿ‘‰ +๐Ÿ‘Š +๐Ÿ‘‹ +๐Ÿ‘Œ +๐Ÿ‘ +๐Ÿ‘ +๐Ÿ‘ง +๐Ÿ‘ฉ +๐Ÿ‘ถ +๐Ÿ‘ป +๐Ÿ’ƒ +๐Ÿ’“ +๐Ÿ’• +๐Ÿ’– +๐Ÿ’— +๐Ÿ’™ +๐Ÿ’š +๐Ÿ’› +๐Ÿ’œ +๐Ÿ’ฆ +๐Ÿ’ช +๐Ÿ“Œ +๐Ÿ”ฅ +๐Ÿ–ค +๐Ÿ˜€ +๐Ÿ˜ +๐Ÿ˜‚ +๐Ÿ˜ƒ +๐Ÿ˜„ +๐Ÿ˜… +๐Ÿ˜† +๐Ÿ˜‰ +๐Ÿ˜Š +๐Ÿ˜‹ +๐Ÿ˜Œ +๐Ÿ˜ +๐Ÿ˜Ž +๐Ÿ˜ +๐Ÿ˜– +๐Ÿ˜˜ +๐Ÿ˜™ +๐Ÿ˜š +๐Ÿ˜› +๐Ÿ˜ +๐Ÿ˜ข +๐Ÿ˜ฅ +๐Ÿ˜ญ +๐Ÿ˜ฑ +๐Ÿ˜ณ +๐Ÿ™‚ +๐Ÿ™„ +๐Ÿ™† +๐Ÿ™‡ +๐Ÿ™ˆ +๐Ÿ™Š +๐Ÿ™‹ +๐Ÿ™Œ +๐Ÿ™ +๐Ÿค” +๐Ÿค— +๐Ÿคฃ +๐Ÿคค +๐Ÿคฆ +๐Ÿคฉ +๐Ÿคญ +๐Ÿฅ„ +๐Ÿฅฐ +๐Ÿงก +##n +##u +##k +##o +##I +##N +##a +##l +##p +##h +##y +##i +##s +##์‚ฌ +##ํ‡ด +##๊นŒ +##์ง€ +##m +##c +##t +##e +##์—ฐ +##๊ณ„ +##์ž +##๊พธ +##์ž‘ +##๊ตฌ +##ํฌ +##๋„ +##๋„˜ +##๋„ˆ +##๋ฌด +##r +##g +##ใ…‹ +##๋Œ€ +##๋ถ +##ํŠน +##๋‹จ +##์ด +##๋ฆฌ +##8 +##0 +##4 +##1 +##9 +##v +##์–‘ +##์‚ฐ +##์—… +##์ฉ +##์™ธ +##๊ต +##ใ† +##๊ฒฝ +##์ฐฐ +##์˜ +##๋ฌธ +##๋ฃก +##โ„ƒ +##d +##๋ด +##ํ‰ +##์‹œ +##O +##K +##A +##j +##ํŒ” +##๊ผญ +##z +##ใ…œ +##ใ…  +##ํ˜„ +##์œ„ +##ํšŒ +##๋ณต +##์ดˆ +##๊ธฐ +##์— +##์ˆ™ +##ํฌ +##๐Ÿ˜‹ +##๊ฑด +##์ถ• +##ํ†  +##๋ชฉ +##๋ถ€ +##์„œ +##w +##ํŒ +##b +##๋‹ฌ +##์ˆ˜ +##์Šน +##รณ +##๊ธˆ +##์„ฑ +##ํ™” +##์„ +##5 +##์ง„ +##q +##P +##E +##๋…ธ +##์•ผ +##C +##๋ ˆ +##์—ญ +##ํ˜• +##B +##F +##๋กœ +##์•„ +##ํ‹ฐ +##๋ณด +##์Šค +##๋‹ˆ +##์นœ +##๊ณ  +##๋ ค +##์ธ +##๋ฐฑ +##์ œ +##๋„ +##์ค‘ +##R +##ใ™ +##ใ‚Œ +##ใช +##ใ„ +##๋ง +##๋ผ +##ํ…Œ +##์Ž„ +##์—ฌ +##์—ด +##ํƒˆ +##๋“ค +##ํ—Œ +##์‹ +##ํ•  +##๋ž˜ +##์š” +##๊ตฐ +##ํฌ +##๊น€ +##๋ช… +##รค +##f +##Y +##M +##S +##U +##๋ฒ„ +##๋ ธ +##๋‚˜ +##๋ฏธ +##๋‚จ +##๋† +##์œ  +##์ฃผ +##T +##ํœด +##์› +##ํ•ด +##์ฐฝ +##์–ธ +##์–ด +##๋ก  +##๊ณผ +##7 +##x +##ใ…ˆ +##ใ„น +##ํ•˜ +##์ปฌ +##๋Ÿฌ +##์ ธ +##๋ฉด +##๊ฐ +##์ +##๋ฆฝ +##์†Œ +##ํŽธ +##๊ธ‰ +##ุง +##์˜ค +##๊ทธ +##๋ฅด +##๊ฒŒ +##์˜ +##์† +##์„ค +##๋“ +##ํ‚ค +##ํ•ญ +##๊ณต +##์ „ +##์ƒ +##ํ™˜ +##๊ฒฐ +##๊ถŒ +##์ผ +##๋ณธ +##์œค +##์ • +##๋งˆ +##G +##ํ•™ +##D +##๊ฑฐ +##์‹ฑ +##๋ฐ” +##ํ™ +##๋นˆ +##์‹ค +##์šฉ +##ะพ +##ะป +##ัŒ +##ัˆ +##ะต +##ะฒ +##ะธ +##ั +##ั‚ +##ะบ +##๋ง +##ํ +##๋  +##๋งด +##2 +##์•ˆ +##์ฐพ +##๋Š” +##๋‹ค +##๋™ +##๋ผ +##์นด +##๋žจ +##๋ฏผ +##๋”” +##ํ™ฉ +##๊ธ€ 
+##์ž˜ +##๋ฅ™ +##3 +##ฮบ +##์šด +##์™€ +##์งœ +##๋ณ„ +##์Œ +##์‹  +##ใ…‘ +##ใ…“ +##ใ…• +##ใ…— +##์ƒ‰ +##์š• +##ใƒฌ +##ใ‚ค +##ใƒผ +##๋ฌผ +##๊ฐ€ +##๋ฐ +##์„ธ +##๊ฐœ +##ํ˜ +##๊ตญ +##ํ˜ธ +##์•” +##๊ฐ +##๋Š˜ +##์ƒ› +##๊ฐ„ +##์–ผ +##๊ตด +##ํŒŒ +##ํŒจ +##์˜€ +##๋ชจ +##ํ„ฐ +##์‡ผ +##๋žŒ +##L +##์ฒญ +##์žฅ +##๋“ฑ +##์ทจ +##๋ฝ +##์œก +##ํ’ +##์Šต +##๋ฒ” +##์…‰ +##ํ†ต +##๋ น +##์„  +##์•ฝ +##๋ ฅ +##์ฒด +##์  +##๋ฃŒ +##์žฌ +##๋‹˜ +##๋‘ +##ํ•จ +##์ณ +##ใ…… +##ํƒ€ +##๊ทน +##์šฐ +##๋‹น +##ํ–‰ +##๊ด€ +##W +##ใ‚‹ +##ใ‚ˆ +##ใ† +##ใซ +##๋“  +##ํƒ +##๋งŒ +##์ฒœ +##๋ž‘ +##๋Š +##ํŠธ +##๋‘ฅ +##๋‚ด +##ํ•œ +##๋ถ„ +##์€ +##์กฐ +##๋Œ„ +##์ธ  +##์ขŒ +##์ฏ” +##์ˆœ +##๐Ÿ’“ +##ใ… +##๋ž— +##ใ…Ž +##๋• +##์šธ +##๋“œ +##๊ด‘ +##๊ณก +##๋ก€ +##๋ณ‘ +##์‚ด +##ใฎ +##ใฏ +##ใƒˆ +##ใƒ +##๐Ÿ˜ญ +##๐Ÿ˜ข +##์ถœ +##๋‚  +##๋ฐค +##๋•… +##์ฝฉ +##๊ฐ™ +##ํ„ธ +##๋ฒค +##์ฒ˜ +##์ž… +##๋ฅผ +##๊ณจ +##์ง‘ +##ฤ +##ฤซ +##๋ด‰ +##๋นŒ +##์•… +##๋“ฌ +##ํญ +##๋ Œ +##์น˜ +##์ข… +##ํ˜‘ +##๊ทœ +##์ฐ +##์ค€ +##๋ฐ• +##์ ‘ +##ํšจ +##๋ฆฐ +##์„ +##์˜ฅ +##์ตœ +##์งˆ +##๋žต +##๋…ผ +##์Ÿ +##H +##์™” +##๋ณ€ +##๊ทผ +##๋ฐฉ +##๋‚ญ +##๋–  +##ะด +##ะฝ +##ะฐ +##ั +##๋…น +##์˜ˆ +##์™„ +##์™• +##ํƒœ +##๋ฐœ +##์ƒจ +##๋ฐฐ +##๋… +##ใ‚‚ +##รฉ +##๊ฐ• +##์ง +##์‘ +##์˜Œ +##ใ…Œ +##ใ„ฑ +##ใ„ท +##ํ +##ํ•ฉ +##์œต +##์ – +##๋ฐ˜ +##ํ˜ผ +##ฯ‰ +##ฯ€ +##ฮฑ +##ฯƒ +##ฮน +##ฯ‚ +##๋ฉ€ +##๋ง‰ +##ํ”ผ +##๋ฒ… +##์ฑ… +##์ž„ +##์ƒค +##ํ…œ +##์…ฐ +##ํ”„ +##๊ฒ… +##๋ฝ‘ +##๋Ÿฝ +##์ฒผ +##ใ…Š +##Z +##V +##์•ก +##6 +##์ƒ +##ํ™œ +##ํ—˜ +##ํƒ• +##์นผ +##์ฟ  +##์ฝ” +##๋ฉค +##๋กฑ +##ํ„ด +##์›… +##๋„ค +##๋ž€ +##ํ…” +##ใ‚“ +##ใ‹ +##์˜จ +##๊ฟˆ +##๊ฝƒ +##ใƒช +##ใ‚น +##๋ฒ• +##ร— +##๐Ÿ’š +##๐Ÿ’› +##์—ฅ +##๋น„ +##ํˆฌ +##๋ฉฐ +##์ฆ +##์œจ +##๊ดด +##์ฒ  +##๋ชฐ +##ํ•„ +##ํ•€ +##๊ธธ +##์ž” +##ํ‘œ +##ํŽ˜ +##๐Ÿ™ +##๐Ÿป +##๐Ÿ™‡ +##์ˆ  +##๊ท€ +##์›Œ +##๋ฌ˜ +##์—ฝ +##์†ก +##ํ–ฅ +##ํ‘ธ +##ํด +##๋กญ +##๋ฌต +##ํŒก +##๋ธ” +##๋ฃจ +##๋…€ +##๋งค +##๋ˆ„ +##๊ธด +##์œ™ +##ใ…‡ +##์ถฐ +##ใ…” +##ํˆ‡ +##ะถ +##๋„ท +##๋ชฌ +##ํƒ‘ +##์ถ” +##์ฐฌ +##ํ˜œ +##์•™ +##๋ฆ„ +##๋•Œ +##ํƒ +##๊ฟ‰ +##๋‚œ +##ํฅ +##์กฑ +##๋ฆผ +##์ง• +##๋œ +##ใƒญ +##์–˜ +##์ค„ +##๋ ด +##์Š +##๊ฒฉ +##ํ˜น +##๋Ÿผ +##๋ณผ +##๋ฅ˜ +##ํ›ค +##์œผ +##์ฝค +##๋ฃน +##๐Ÿ˜‚ +##๋ช‡ +##๋ฒฝ +##๋ จ +##์กด +##์ƒˆ +##๋ณ +##ั€ +##์‹ฌ +##ํœ˜ +##์—ˆ +##ํ—จ +##์‚ผ +##๋‹ฅ +##๋ฆด +##ใ…ƒ +##์—  +##์šฑ +##์‹ถ +##๋งจ +##์ฆˆ +##ํ—ˆ +##์ ˆ +##ใ… +##แ„’ +##รก +##ใƒฉ +##๊น” +##์ถฉ +##ใ…‚ +##๋ฐ› +##์•˜ +##์ง“ +##๋•ก +##๋ฝ• +##ํ†ฑ +##ํŒ… +##ํ“จ +##์•• +##์›€ +##์ข‹ +##๊ฒŸ +##๋ฃฌ +##์ฝ +##์•  +##์…˜ +##ํ  +##ั‡ +##ั‹ +##๋… +##๋‹ญ +##๊ฐˆ +##๋ฐฅ +##์”จ +##์—ผ +##์„ญ +##์ถ˜ +##์ฑ„ +##๋†€ +##ะน +##ํ†ค +##์  +##ํ‹ด +##๐Ÿฅ„ +##๐Ÿ’™ +##รผ +##๋ฉ” +##์ผ€ +##ํ… +##ะฟ +##Q +##๋ฅœ +##์ฐจ +##๊ฒ€ +##ใ“ +##๐Ÿคฉ +##๋น  +##๊ฟ€ +##๋ง› +##์—„ +##ใ‚ซ +##ํ‹€ +##๋žœ +##์›จ +##์ฒ™ +##๋ฆ‰ +##์ปค +##์ฐŒ +##๋‚™ +##ํŒ€ +##์—˜ +##์—” +##ํ•‘ +##ใƒซ +##๐Ÿ™† +##์นธ +##์กธ +##ํ’ˆ +##๊พผ +##์Šˆ +##์ด +##๋…„ +##์ธต +##๊ป„ +##์‹ฏ +##ัƒ +##ะผ +##ํ”Œ +##๋œป +##๋”˜ +##์›” +##๋”ฐ +##J +##์•ฐ +##์“ฐ +##๐Ÿฅฐ +##๐Ÿคญ +##๋ขฐ +##์†œ +##๋ด„ +##๋‹ด +##๋ž„ +##๋“ฏ +##๋”ฉ +##๋ถ™ +##๐Ÿคฃ +##๐Ÿ˜ +##๐Ÿ’ช +##๋จธ +##๋˜ +##์—‡ +##์ˆญ +##๋น› +##ํ–‡ +##๊ฒฌ +##์ถค +##์† +##ใƒž +##ใƒณ +##ใ…ก +##๋นจ +##๋งž +##ใƒ† +##์ต +##์•ž +##์•Œ +##ใ… +##รจ +##๋Ÿฐ +##๋„จ +##๋ฝ€ +##ํŽ€ +##๊บ„ +##๋ก +##๊บผ +##์กŒ +##๋ฉ˜ +##์ € +##๊ด˜ +##๋œจ +##๋ฅธ +##ํฐ +##๋Šฅ +##ํŽจ +##ใ… +##๋ˆ  +##X +##์•— +##ํ™• +##๐Ÿ˜ +##๐Ÿ‘ +##โ—€ +##ใ— +##์„ฌ +##๋„ +##ใ• +##ใŸ +##์žก +##ํƒ„ +##ะ +##ะš +##๊ฟ +##๋งฅ +##๋˜ +##โˆผ +##๋š +##์ฐฉ +##์ฃ„ +##์ปจ +##๋ชฝ +##๋” +##๋Š‰ +##์‚ญ +##๋ถˆ +##์ชผ +##๋กฌ +##ํŽœ +##รง +##ํ +##์›ƒ +##์ดŒ +##๋ญ„ +##โ…  
+##โ…ก +##์ฅ +##ํ˜€ +##๊ฑฑ +##๋ ฌ +##ฯ +##๋‚ฌ +##๊ผด +##๋ฒ  +##โ € +##ํผ +##ั† +##ใƒ‹ +##๏ฟฆ +##๋ซผ +##๋ƒ +##์šฅ +##์ฃฝ +##ฮฟ +##ฯ„ +##ฮฝ +##๋Ÿ‰ +##๊ฑธ +##๋ ‰ +##๊ฐ“ +##ํžˆ +##๐Ÿ–ค +##ใ‚ +##ใ‚Š +##ใŒ +##ใจ +##ใ  +##๊พ€ +##๊ผฌ +##๐Ÿ˜› +##์„ผ +##ใ„ฒ +##์ฅฌ +##๋ฎค +##๋ธŒ +##๊ฒ  +##์ˆ€ +##๋จผ +##์œฝ +##์ด‰ +##๊ธฟ +##๋งต +##ใ +##์Šฌ +##๋ฅจ +##๐Ÿ˜„ +##์ดจ +##๊ธ” +##ํ” +##ฮธ +##์Œˆ +##๋ผˆ +##๋ทฐ +##๋ฑ… +##๐Ÿ’• +##์˜ฌ +##๋ˆ +##์ถง +##์Šจ +##ั… +##๋†ˆ +##ํ‘ +##์…€ +##๋“ˆ +##๋“€ +##ํ›„ +##์พŒ +##์‰ฌ +##ํŠผ +##๋‹ต +##ํŒŸ +##ํ›ˆ +##๐Ÿ˜– +##๊ฒจ +##๊ณณ +##๋ค +##๋Ÿญ +##์˜ต +##์…” +##์คฌ +##๋Œ +##๋ง +##์†” +##๊น… +##์ฒจ +##ํ‹ธ +##๋นฐ +##๋ฒŒ +##์ง +##์จ +##๋ˆ +##ใฃ +##๋ž +##๋จน +##โ…ค +##๋  +##๋ƒ„ +##๋žซ +##์—‰ +##์งค +##์ปด +##ํŒฉ +##๋ฐ€ +##๋ฒˆ +##์—ก +##๋ˆˆ +##๋œ +##๋ƒฅ +##๋–ผ +##ใ„ด +##์นจ +##์˜ +##๋‚ฉ +##ะฑ +##๋น— +##๋ค„ +##ใ‚’ +##ใ‘ +##์บ +##๋‚„ +##๋งน +##๊ถ +##ํŠ  +##๋ฅ  +##ยณ +##โ—‹ +##์ƒฌ +##๐Ÿ˜˜ +##๐Ÿค— +##์ผœ +##๐Ÿงก +##๋‡จ +##๋” +##์Œ€ +##์Œ +##๋นก +##๋“ฃ +##๋Œˆ +##โ–ช +##โ‘ก +##โ‘ข +##๊ถค +##ํŽ  +##ํ–ˆ +##์›  +##๐Ÿ˜† +##๐Ÿ’– +##์Ÿค +##๊ณค +##๐Ÿ‘Š +##์น™ +##๊นจ +##ใค +##๋  +##์ฐธ +##๋‚Œ +##์—† +##๋Š‘ +##๋žญ +##๋‹› +##๋ง +##๋‹™ +##๋žฉ +##์˜ท +##๋ +##๋˜ +##์ก‹ +##์ฝœ +##์ž‰ +##๋„› +##๋ธ +##๊ณฑ +##๋ › +##์ค˜ +##์˜น +##ํŒฝ +##์น  +##โ–  +##์ฃ  +##์ธก +##๋ฒ™ +##๋” +##๋‰ด +##ํ‚จ +##์ž‡ +##์‹ผ +##๐Ÿ’— +##ใƒ  +##ใ‚ท +##์งฌ +##์žŽ +##์ฆ™ +##๋ƒ‰ +##๐ŸŽ‰ +##๐Ÿซ +##๋ฅ +##ํž˜ +##๋น™ +##๋š  +##๋‹ +##๊ฒ +##์นญ +##๊ตฝ +##ํ‹ฑ +##ใ‚ฟ +##๋ฐ +##โ…“ +##๋ญ‡ +##ํƒ” +##๋Š„ +##๋†‹ +##๋ฒผ +##ํ—ค +##ํ•ต +##ํ‚ด +##์บ˜ +##๋ฉฉ +##ํš +##ํ€ด +##์žˆ +##๋ž™ +##๋ƒˆ +##์บฌ +##๐Ÿถ +##ใˆ +##๋˜˜ +##ฮฒ +##๋ป˜ +##ใ +##ใก +##ใŠ +##ใพ +##๋ฐ– +##๐Ÿ‘ +##๋ฃฉ +##ใ‚‰ +##ใ‚ญ +##ํšก +##์ฐฎ +##ํ˜ˆ +##์ข€ +##์ฆ˜ +##์บ‰ +##๊ฒƒ +##์ฆ‰ +##ใฆ +##๋‹ +##์œท +##ะท +##์ ค +##์ผ“ +##์ฝด +##๊ธ‹ +##ํผ +##ใƒ• +##๋ฉœ +##๊ฐ– +##์œณ +##์ฉŒ +##๐Ÿž +##ใƒก +##ํ— +##ะณ +##์ฒฉ +##๋–ป +##๐Ÿ˜ +##ํ™ˆ +##ํ•ธ +##๊ป˜ +##์‡  +##๊ฒ„ +##๋‹ผ +##๐Ÿ˜š +##๐Ÿ˜™ +##๋‘˜ +##์™ˆ +##๋‚ผ +##๋ +##ใฌ +##๋  +##๊ปด +##ใƒƒ +##์ชฝ +##์ซŒ +##๋šœ +##์ฆ +##๐Ÿผ +##รถ +##ใง +##๋„น +##ํŒฌ +##๐Ÿ˜… +##๐Ÿ™Š +##์บ  +##๋ป +##์ฉ +##๊ฐ‘ +##๊ฒ +##๋Š  +##์ง +##๐Ÿ‘Œ +##์–ต +##ฮป +##๋šฑ +##๋‹‰ +##ํ†ฐ +##๋‡Œ +##แ†ž +##๋ฒจ +##๊ฐ +##๋”ธ +##์—‘ +##๋ชธ +##๊ท  +##๋ฑŒ +##์   +##๋ฟŒ +##๋…œ +##ํœ  +##๋ฉธ +##์ปต +##ู„ +##๋ฆฟ +##์งš +##ใ…ฃ +##ใƒ– +##ใ‚ฏ +##๊พน +##๋ชป +##๋ฃธ +##๊ดœ +##์‰ฝ +##๋ฆญ +##ํƒฑ +##ํก +##๋ฆ‡ +##ฮต +##ํ™€ +##์•œ +##ํŠœ +##รญ +##๐ŸŒŸ +##์ฟ„ +##์  +##๋น… +##ํ‰ +##๋ด +##ใ‚ฃ +##๊น +##๋ˆ— +##๋ญ‰ +##๊นก +##๋กฏ +##์—Œ +##ํƒ +##ใ‚ +##์บ” +##ฮท +##ใ…› +##์œŒ +##ํ… +##๐Ÿ”ฅ +##๐Ÿ‘ˆ +##์ผ„ +##๐Ÿ˜Š +##๐Ÿ˜ฑ +##ฮณ +##๊นป +##์—ฃ +##๋‹ซ +##๊ณฝ +##๋นผ +##๋Šฌ +##์‚ฟ +##์บ‡ +##๋•ƒ +##๐Ÿ˜Œ +##๊ณฐ +##๊ฒธ +##๋ฐ‘ +##์ƒ˜ +##๋ˆ” +##๋„ฌ +##์…ธ +##์ฝ˜ +##๋’ท +##ํ›Œ +##ํ™ง +##๐Ÿ‘‰ +##ํ†จ +##๋—„ +##์นต +##๋žฌ +##๋ชซ +##์•ต +##๋œฉ +##์ซ€ +##ํˆญ +##์…จ +##์ณฃ +##์ฟต +##ํ”ฝ +##๊ฒผ +##แ†ก +##๐Ÿ’ƒ +##๋ง™ +##์น +##๋Œ +##์ž– +##์™œ +##์Ÿˆ +##๊ถ +##์‡„ +##๋‘” +##์บ„ +##๐Ÿ˜€ +##ํ€ธ +##๐Ÿ‘‹ +##์™“ +##์ฑ  +##ํž +##โ™ช +##๐Ÿ™ˆ +##ะ  +##์–„ +##์šฌ +##๊ทค +##๋ญ +##ใŽก +##์…œ +##ํ’€ +##๋Ÿด +##์  +##์‹ญ +##ใƒ‰ +##ํ—› +##๋งŽ +##๋†“ +##๋‘  +##์ญ +##์นฉ +##๋Œ” +##๋ฏ€ +##์ผˆ +##๐Ÿ˜ƒ +##์ผฐ +##๋ญ” +##ใ‚„ +##ใ‚ฐ +##์ญ +##์„ฏ +##๋ฐญ +##ํ›ผ +##์‘จ +##ํ„ฑ +##ํ‘ผ +##์›ก +##์˜ด +##ํ†ˆ +##ฮด +##โ˜† +##๋ทธ +##ร  +##ํ…… +##๋‘‘ +##๋ธ +##์‚ฝ +##๋กค +##๋ฏน +##โ™ฉ +##โ™ฌ +##ํ†ก +##๋–จ +##๋ฐด +##ํ‚ต +##ํผ +##์•จ +##๐Ÿ’œ +##ํ”ˆ +##ใฟ +##๋ปฌ +##๊ถˆ +##๋Ž… +##์ฆ +##๐Ÿ˜ฅ +##๋ฟ +##์งฑ +##์ˆ +##๋ฉˆ +##๊ณ +##ํ‚ท +##เน‘ +##๐ŸŽ +##ํ˜ +##์ž  +##์”ฌ +##๋ฐ +##โ–ถ +##์ตธ +##รข 
+##๊ฐ” +##ํŠ€ +##๋˜ฅ +##์ฐ” +##๋ณถ +##๋†’ +##๊นฝ +##๐Ÿด +##์ˆฉ +##๋€จ +##๋‰˜ +##์Š˜ +##์“ด +##๐Ÿ’ฆ +##ํ—ฟ +##๋ค +##๋ž +##๋ฐธ +##๋ฑƒ +##ํ•ณ +##๊ฐค +##๋ถ“ +##๊ฟฐ +##๋šซ +##ํ€˜ +##๋‚‘ +##ํ… +##รท +##ํ‚ฌ +##๋ท” +##ํ”” +##์ƒน +##ใ‚ธ +##๋ฆ… +##ฮผ +##์บ… +##๋จ +##๋Œœ +##์•ฑ +##๋น” +##์‹ธ +##๋นต +##์ฒธ +##์‚ฅ +##ํด +##๋”œ +##์”ฉ +##๋‡ฝ +##์ง– +##ํ€€ +##๋ฟ” +##ใ‚ณ +##ํ–„ +##ใ…† +##๋ฏฟ +##ํ‹ˆ +##์ฝง +##๋ฉ +##๋ƒ  +##์‹น +##ํ‚ฅ +##๐ŸŒฟ +##๋ต +##์ญˆ +##๊ต‰ +##๋ฟ… +##๋›ฐ +##์…‹ +##โŠ™ +##๋–ณ +##์œ— +##๋ผ +##๋Œ“ +##ใŽŽ +##ํ— +##์‚ +##ใ…‰ +##๋’ค +##ํ˜” +##์œˆ +##ํ‚น +##ํ›™ +##๋ฉ +##๋–„ +##ใ‚ข +##ํ‰ +##์†Ÿ +##๐Ÿ˜ณ +##๏ฝ +##๏ฝ” +##๊ฝ +##๐Ÿƒ +##์ธ„ +##์ฉ” +##ํž +##๋‡ธ +##๋‹Œ +##๋ด‡ +##์žฃ +##ํ‘น +##์Œค +##๋Ÿฟ +##์…ˆ +##๋นค +##โ”” +##ํƒ  +##ํ—™ +##๋ป‘ +##์ดฌ +##์…ฉ +##๊ฑ€ +##ํ—ฌ +##๊ธ +##๋–ก +##๋ŽŒ +##์‹ซ +##๋ฉฑ +##๐ŸŒผ +##๋ถ• +##๊บพ +##๋Œ +##๋ง˜ +##๋ณ +##๏ธ +##๐Ÿ™„ +##์ฑˆ +##ํƒค +##๋ดฃ +##์ฐœ +##๊น +##์–€ +##์ซ“ +##์› +##์ขƒ +##์˜† +##์ƒต +##์ ฏ +##๋ฅต +##๊ฟจ +##์Šด +##๋ ท +##๋ฏ„ +##๋…• +##์ Š +##ํ”• +##โ™ฅ +##โ™ก +##์˜ฎ +##๊นƒ +##์˜ +##๊ฐธ +##๐Ÿ“ +##์ฃ— +##๋‘ฌ +##๋œธ +##๐Ÿ‘ป +##์›Ÿ +##๊ฟ” +##๊ท„ +##ใƒ +##ํ›” +##์–Œ +##ร‰ +##์•ค +##์งธ +##๊ฐ’ +##ํŽ™ +##๋ ต +##๊ผฐ +##ํ™ +##๋Šฆ +##๋• +##๋”ฑ +##๋ถ +##โ”ƒ +##๊ฐ— +##์ณค +##๋ฎ +##๊ด„ +##์ถฅ +##๊ฟ‡ +##๊ฒœ +##โ“ +##โ‘ท +##์™‘ +##์ฎธ +##์ก +##์ฉจ +##๋˜‘ +##์žผ +##์ˆด +##๊ตฟ +##ํ +##์Ÿ +##๋ฒก +##๋‹ฆ +##์‘ค +##ํŠฌ +##๋‹ท +##โ— +##โ‹… +##โ‘ฃ +##๋„ฃ +##์ทŒ +##ูˆ +##์ˆฒ +##์จ +##๐ŸŒธ +##ํŽ‘ +##์Šท +##์œ… +##์ฟจ +##๊ธ +##๋œฐ +##๊ฒ‹ +##๋„ฅ +##ํ›… +##์ฅ” +##ํ›  +##ํšŸ +##๋ฟœ +##ํฐ +##๋„Œ +##๐Ÿ‘ฉ +##ํŒ +##๋Žƒ +##๋‚ธ +##๋Œ• +##๋งท +##๋„ผ +##๋งก +##๊ฝ‚ +##์ฃค +##๋ฃฐ +##๊ป +##๋ฐŒ +##ยฑ +##๊ฝˆ +##์‰ผ +##๋ ‡ +##เท† +##์ญ‰ +##์„น +##ํŠ• +##์ขก +##๋ฉ‹ +##์ฐป +##์‰ฐ +##ํ™‰ +##๋žด +##์‰ +##์‚ฏ +##์•Š +##์•‰ +##๋ฐง +##โ†’ +##ใˆ +##ใˆ +##ใˆ‘ +##๋ฑ€ +##๋ˆ• +##์คŒ +##์ซ‘ +##์‘น +##๋ผ› +##๏ผ‹ +##๊ผˆ +##๋ฎŒ +##์ฆŒ +##๋„™ +##ํŽญ +##๋ฉ• +##๋’€ +##๊ณถ +##์”€ +##์žค +##ํŽ„ +##๊ฐฑ +##์ท„ +##์—Š +##๋ฃฝ +##์› +##โ—† +##์ช„ +##๐Ÿคค +##๋ˆ™ +##๊ปŒ +##โ…ฃ +##๋†” +##์—ฟ +##๊ฐฏ +##๊ฟฉ +##์Šญ +##ห˜ +##ฬ +##๋ € +##โ‘ค +##ํ• +##์† +##์ก” +##๋…” +##๋  +##์‘ฅ +##์—ฎ +##๋ˆ… +##์บก +##๋ปฃ +##๋ฝˆ +##์ผ  +##๋“ญ +##๊ฑ +##์ŒŒ +##๐Ÿ‘ถ +##๐Ÿ‘ง +##๐Ÿ˜‰ +##์ด˜ +##ํ†ณ +##๋… +##๋ฆŽ +##๐Ÿ‘€ +##ํŒฅ +##ํƒฌ +##์ฒซ +##๊ณ— +##๋‘ฃ +##ยฒ +##์ฉœ +##๋”จ +##์‚ถ +##๋ฐ‹ +##๊นŠ +##๋พฐ +##์ฝ +##์จ” +##๊ตผ +##์ฉ +##๋— +##์ฑ” +##๋— +##๋Š +##์”ป +##ํฐ +##์ฟค +##ํˆฐ +##๊ฝ +##๋€Œ +##๋ ™ +##์ข† +##๋…˜ +##์– +##ใƒŽ +##์– +##๋”ค +##๋บŒ +##ะก +##์ด +##๋ž +##์ฏง +##์Œ“ +##๋ฏˆ +##โ–ก +##๊ป +##๐Ÿ™Œ +##๐Ÿคฆ +##์ƒด +##ํŽด +##์ฉ +##์ด +##๋ด… +##์ “ +##๋ฝ +##๊ป‘ +##์œ +##๋ฐ +##๋ตˆ +##ํ˜ +##์ˆจ +##๋คผ +##๋”• +##์ƒท +##๋ฎจ +##๋‚€ +##์„ž +##ใ‰ฃ +##๋„“ +##โ–ผ +##โ†“ +##โ† +##๐Ÿ˜Ž +##์ฝ• +##์š˜ +##์ˆซ +##์ซ„ +##์™ผ +##์“ธ +##ํ—ฅ +##๊ฝน +##๊ผผ +##์šค +##๋„ +##๋น• +##์ฃต +##์šง +##์ฐผ +##์ฐ +##๋ ˜ +##๋ถ‰ +##์ฉก +##๊ฒป +##๐Ÿท +##โ–ฒ +##๋ดค +##์Š› +##์˜ป +##ํ™ฐ +##๋ฒต +##๊ตฃ +##์“ฑ +##์ดต +##์ซ˜ +##์›ฌ +##๋‚ฏ +##์•“ +##์บฅ +##์… +##โ” +##ํ—ท +##๊ตณ +##๐Ÿ™‚ +##โฃ +##๊ทˆ +##ยฐ +##ํŽฉ +##๋ƒ… +##๋ป” +##์˜ +##์ƒŒ +##ํž™ +##๋ด +##ํ…จ +##๊ฒ” +##ํžŒ +##ํ“Œ +##๋ƒ‘ +##์„ฃ +##ยด +##์” +##๊ฒน +##๐Ÿณ +##์ฐ +##์ƒ +##์ฑ™ +##์ค +##์•ณ +##์ŠŒ +##๊น +##ํž‰ +##๐Ÿฝ +##๊ฒ‰ +##์ฏค +##๋„‰ +##์ธ +##์ดค +##ํฝ +##์ข +##๋ฐˆ +##๊น +##๋ฒš +##๐ŸŽถ +##๋ณœ +##์ฟผ +##๋šค +##์ง  +##์ผ +##๋ฒ— +##๊ฑท +##๋ฉง +##์„ฐ +##๋†จ +##๋นณ +##๋Žˆ +##ํ„ +##์ฐน +##๐Ÿ™‹ +##ํŽŒ +##์จŒ +##โ‘จ +##๐Ÿƒ +##๋“ +##๋‚ฎ +##๋‹ฎ +##๋คฝ +##ํ•Œ +##ํŒ +##์ฒต +##์žŒ +##ํŒŽ +##ํŒฐ +##์งญ +##๋ฎ +##๋‹ฟ +##ํŒธ +##์˜› +##๋”ฅ +##์นซ 
+##๋ง‘ +##๊ฒช +##๋˜ฌ +##๋‚š +##๋‚ฑ +##๋ฅญ +##๋ณ +##๏ฝž +##ฬˆ +##๋‚ต +##์˜™ +##ฬฅ +##๊ผฝ +##์ฌ +##์„ถ +##โ˜… +##์–น +##๋ฐ‰ +##์šœ +##๋ฏ +##์ธจ +##๋‚ซ +##๊ฐ› +##๊ฐ‡ +##์งน +##์ˆ +##์ฅด +##์›น +##ํ•ซ +##์ˆฏ +##ํŽ +##ํ…€ +##๋ฎฌ +##ะŸ +##๋‹ +##โ˜ž +##์†Ž +##์ผ +##์”ฝ +##์”Œ +##์›ฐ +##์ฟก +##๋ˆŒ +##์…ด +##๊ตต +##๋œฌ +##๋•€ +##๐Ÿค” +##์ƒฅ +##์‰ด +##ํŒน +##ะ’ +##์พจ +##โ–ฝ +##์™ +##ํ€ผ +##๏ผ +##์…ค +##๏ผœ +##์ ผ +##์ฑŒ +##๋€œ +##๋ฃป +##ํŒœ +##๏ฝ‡ +##ํ’‹ +##โ—ก +##ํˆผ +##์žƒ +##๊ฐ  +##์ปท +##๋„œ +##ํŒป +##๊ฑ” +##๋”ช +##ํ…ก +##ํ• +##ํ•ผ +##๋ +##์žญ +##์บฃ +##์žฝ +##์ +##ํ– +##์„€ +##๋Ž +##ํˆด +##๋Ž  +##๋Œ +##๋•  +##์Žˆ +##๋Ÿ +##โ— +##๋ฆ +##๋’ˆ +##๋„ต +##๋ตค +##โ™ค +##์ฟฐ +##โ‘ฆ +##๊นœ +##ํ‹‹ +##โ–ณ +##โ‘ฅ +##๋ก„ +##๋‚ณ +##์™  +##ํ˜“ +##ํŽ +##ํ—‰ +##์ฝฅ +##๋ณ• +##์ฌ +##์žŠ +##๋ž  +##๋ฒณ +##๏ผž +##๋‡ฐ +##๋ƒ˜ +##ํŒ  +##โ‘ช +##ํœผ +##โ‘ต +##๋งฃ +##๋•‹ +##ํœ‘ +##์˜ญ +##๐Ÿ˜ +##โ‚‚ +##๋‘ก +##ํฟ +##๊ณธ +##ฮ” +##๊ฟ‹ +##ใ‰ข +##์Œ” +##์ฅ˜ +##๋ง +##์ฐข +##ห +##๋ž– +##ํ‰ค +##์ฑ— +##ํ“ธ +##์†ฅ +##์ฝฐ +##์พ… +##ะœ +##๋บ‘ +##์จ‹ +##๋ƒ‡ +##์ฐก +##์”ฐ +##ห +##๋ญ˜ +##๋„ด +##์Šฅ +##๋›ธ +##ํƒฏ +##ํ›‘ +##๋จ€ +##์ˆ„ +##๋นฝ +##์—Ž +##๋”ด +##โ…ข +##๋ฌฝ +##โ‰ช +##์‰˜ +##โ‘ถ +##๊บฝ +##๋ญก +##์ปน +##๋ฎด +##์ˆ‘ +##๊นŸ +##๊ฐš +##ํ›— +##์›ฉ +##์จฉ +##ใ‰ฏ +##๊ถ‚ +##โ†‘ +##๋• +##์œ” +##๏ฟฃ +##๋•” +##ํŽซ +##ํŒ +##ํ•ฅ +##์ƒ… +##์Š +##ํœ€ +##ํ™‹ +##โ‚ +##์ฐง +##ํƒญ +##ํ—น +##๋” +##์งง +##โ‘ง +##์”น +##์ฑฑ +##์”ธ +##์ฐ +##๋นด +##โ‘  +##๋› +##์นก +##ใŽ +##๋ฟ +##๋–ค +##์ญ +##๋ฉ“ +##๊ดญ +##ํ‹” +##๊ท“ +##ใˆœ +##๋งฌ +##๋”› +##์ง™ +##๊นฌ +##์Œฉ +##โœจ +##๋Š™ +##๋“ +##์ˆŸ +##์‚˜ +##โ‘ฉ +##ํ…Ÿ +##๋ฌ +##์˜… +##ํ…ƒ +##๋”ง +##๊ฒŠ +##ยน +##ํ›จ +##๊ฐ‰ +##๋— +##โ…ฅ +##์ฌ +##โ“‘ +##โ“’ +##๋Œ‘ +##์ณ‡ +##์ด› +##หš +##๋ปฅ +##๋ฑ +##โ—‡ +##ํญ +##ใ‰ฎ +##ํ‰ˆ +##โˆ™ +##์ญ™ +##ํ€ต +##๋ฝ +##๋†‰ +##โค +##๊นฅ +##๋Ž +##๊ผฟ +##์คซ +##์ผค +##์…ง +##๋บ„ +##๊ธฑ +##๋งบ +##์ ญ +##โ˜• +##์–• +##๏ฝœ +##๊ฐฌ +##ํ˜ +##ํž› +##๋œฏ +##์ฑ˜ +##๋€ +##๊นŽ +##๋งธ +##๋™ +##ํ… +##๋“ฆ +##โ”€ +##์™ค +##๊ฟŽ +##ใ‰ก +##แƒฆ +##์ ฌ +##ฬฃ +##๊ถœ +##๋ƒ” +##๋‚Ÿ +##์ปฅ +##๋บจ +##์ค +##๋™ค +##๋ซ +##์‰‘ +##โœ” +##์Ÿ +##์  +##์ปธ +##์ฝธ +##ํŠค +##๋นป +##๋œ€ +##ใ€“ +##ใ…ค +##โ—Ž +##โ”Œ +##โ”‚ +##๊ดŒ +##๋ฐŸ +##์ƒœ +##๊ฐฐ +##ํˆ +##์ณ‰ +##๋•œ +##์ฅฐ +##๋Ž€ +##์ฟฑ +##ํƒ“ +##๋„‹ +##๋ฌป +##๊ฝฅ +##ํ‚ +##ํ—ด +##ใŽ– +##์ŸŒ +##์จ +##โ„ข +##๊ณง +##์žฐ +##๋ฉŽ +##๋›ด +##๋’น +##๋ปค +##๊นฐ +##์ธฐ +##๋ต +##๋ฎˆ +##๐Ÿ“Œ +##โ‚ฌ +##์–ฝ +##ํฉ +##์…Œ +##์–ป +##์ซ™ +##๋—€ +##์žฆ +##โœ… +##โ‘ซ +##๋ฃ€ +##๋นš +##์Œ• +##๋ป— +##๊ท +##๋‚ก +##๋–ซ +##ํ—ต +##์žด +##๋–ต +##๊น„ +##์›œ +##๋ฝ +##๋Šช +##์–‡ +##ํšƒ +##์–— +##๊บ… +##๋ช„ +##โ‰ซ +##โ™ฃ +##์ฉ +##โ„“ +##ํ„ +##โญ +##์˜‡ +##์‡ณ +##ํƒฐ +##๋ฑ‰ +##์‚‘ +##๋จ +##๋ฟก +##ํœœ +##์Œ‰ +##๋ค˜ +##๋„ธ +##ใŽ +##์ช˜ +##โ–ท +##๊ฒก +##ํ…ผ +##ํ€„ +##โžก +##ํ“ฐ +##์ญค +##๋บ€ +##๊นผ +##์ฝฑ +##๊ฝค +##ํฝ +##ํƒ‰ +##๋„ +##์‰ฟ +##๊ตถ +##โ™€ +##์ƒ€ +##๋™‡ +##ใ‰ฐ +##โ˜บ +##ํ™‘ +##โ‡’ +##ํ™ฑ +##๋ชน +##๋€” +##์งข +##โ—ˆ +##ใ‰  +##์™ +##์พฐ +##์•ด +##ใŽฟ +##โœŒ +##๋ญฃ +##๋บ +##๏ผ„ +##๋–ด +##์ฑ• +##ํœฉ +##์ˆฑ +##๊ฐญ +##ํŠฟ +##๋‹ข +##ํ…„ +##ํœ™ +##๋ฌถ +##๋‹ป +##๋ต™ +##์‡ค +##์˜ณ +##ใŠ +##์•Ž +##โ–จ +##๋ทด +##โ–ซ +##โ”ฌ +##๋ขด +##์ซ’ +##ใˆ” +##ํ‘€ +##์—พ +##ํ–… +##๊ฝ‰ +##๊ป€ +##์ปซ +##โ‘ด +##์ข” +##๋ตŒ +##ยฝ +##๋‰  +##๋ฒง +##๋•„ +##๋‹ณ +##์žฟ +##์œ +##๊ณฏ +##๋ˆด +##๋ฉ +##์งŠ +##๋ดฌ +##์Š‰ +##ใŽœ +##์‹ฃ +##๋‚ +##ํ‘ฏ +##๋ผ‰ +##๋‰œ +##๊ณช +##ใŽž +##โ™‚ +##โˆ’ +##์ข‡ +##โ†” +##ํŠ„ +##โ‚ฉ +##ใ„ +##๋จ +##โ˜ +##๋ˆ +##ํŽผ +##ใŽ +##โ”œ +##ใŽ‰ +##๋ผ˜ +##์—ท +##ใŽ’ +##๊ป +##ใˆŽ +##์งฐ +##ํ‰œ +##โ–ฃ +##๋บ +##์œ• +##์›ธ +##ใŽพ +##๋ญ +##ใŽฅ 
+##๋— +##์Š +##โ˜Ž +์œผ๋กœ +์—์„œ +ํ•œ๋‹ค +##00 +##๋‹ˆ๋‹ค +์—๊ฒŒ +๊นŒ์ง€ +์ง€๋งŒ +์‚ฌ๋žŒ +19 +๋ฉด์„œ +๋ผ๊ณ  +20 +๋‹ค๋Š” +์•„๋‹ˆ +๋Œ€ํ•œ +์šฐ๋ฆฌ +๋œ๋‹ค +๋ผ๋Š” +๋Š”๋ฐ +๋‹ค๊ณ  +๋•Œ๋ฌธ +๊ทธ๋Ÿฌ +์Šต๋‹ˆ๋‹ค +์ง€๋‚œ +์œ„ํ•ด +๋ถ€ํ„ฐ +์ƒ๊ฐ +ํ•œ๊ตญ +๋ณด๋‹ค +10 +์ง€์—ญ +201 +200 +๊ทธ๋ฆฌ +๋ฌธ์ œ +์ž์‹  +๊ฒฝ์šฐ +๊ทธ๋Ÿฐ +์ด๋‚˜ +์‚ฌ์—… +ํ†ตํ•ด +๋Œ€ํ•ด +์‹œ๊ฐ„ +์‚ฌํšŒ +๋”ฐ๋ผ +๋‹ค๋ฅธ +๊ธฐ์—… +ํ•ด์•ผ +ํ•จ๊ป˜ +์•„์ด +์‹œ์žฅ +๋ฏธ๊ตญ +ํ•˜๋‚˜ +๊ทธ๋ ‡ +๊ด€๊ณ„ +์ •๋ถ€ +์ด์ƒ +๊ฐ€์ง€ +๋Œ€ํ‘œ +์„ธ๊ณ„ +์‹œ์ž‘ +๊ฐ€๋Šฅ +์ง€๊ธˆ +๊ต์œก +๊ทธ๋ฆฌ๊ณ  +์„œ์šธ +๊ฐ€์žฅ +๊ฒฝ๊ธฐ +๊ทธ๋Ÿฌ๋‚˜ +์‚ฌ์‹ค +์ฒ˜๋Ÿผ +๊ฒฝ์ œ +๋„๋ก +์ด๋Ÿฐ +๋งŒ๋“ค +๊ทธ๋ž˜ +ํ•„์š” +์‚ฌ์šฉ +๋Š”๋‹ค +์ •๋„ +์ผ๋ณธ +ํ”„๋กœ +##์ง€๋งŒ +์œ„์› +์—ฐ๊ตฌ +๋‹ค์‹œ +๋“ค์–ด +12 +์ค‘๊ตญ +์–ด์š” +ํ•ด์„œ +์ดํ›„ +์œ„ํ•œ +์ง€์› +๋ฌธํ™” +์œผ๋ฉด +์ด๋ฒˆ +##ํ†ต๋ น +11 +์œผ๋ฉฐ +ํ•˜์ง€๋งŒ +๋Œ€ํ†ต๋ น +๋ชจ๋‘ +๋‹ค์Œ +๊ด€๋ จ +๋Œ€ํ•™ +์ด์ œ +์ •์น˜ +์–ด์„œ +์ƒํ™ฉ +๋ฐํ˜” +ํˆฌ์ž +๊ตญ๊ฐ€ +์‚ฌ์ด +ํ˜„์žฌ +๋‹ค๊ฐ€ +๊ฐœ๋ฐœ +๋ชจ๋“  +๊ณ„ํš +##๋‹ˆ๊นŒ +์˜์› +์–ด๋–ค +##er +ํ•™๊ต +15 +๊ธฐ์ˆ  +์ด๋ ‡ +๊ฑฐ๋‚˜ +๋งˆ์Œ +์ž…๋‹ˆ๋‹ค +ํŠนํžˆ +18 +๊ฒฐ๊ณผ +ํ™œ๋™ +๊ทธ๊ฒƒ +30 +๋งŽ์ด +ํ›„๋ณด +์˜ํ™” +๊ตญ๋ฏผ +ใ…‹ใ…‹ +##์ŠคํŠธ +์กฐ์‚ฌ +๊ตญ๋‚ด +๊ณผ์ • +์ด๋ ‡๊ฒŒ +์ด์•ผ +์ง„ํ–‰ +์ค‘์š” +์ง€๋‚œํ•ด +๋„์‹œ +์ƒˆ๋กœ +์ตœ๊ทผ +๋™์•ˆ +์‚ฐ์—… +๋‹ค์–‘ +์ „๋ฌธ +๋‚˜ํƒ€ +##on +์ด์šฉ +๊ด€๋ฆฌ +##๋ถ€ํ„ฐ +๊ธฐ๋ก +##์ฃผ์˜ +๋Œ€์ƒ +๋ฏผ์ฃผ +๋Š”์ง€ +๋˜ํ•œ +์ด์•ผ๊ธฐ +์ •๋ณด +์—ฌ๋Ÿฌ +์˜ฌํ•ด +ํšŒ์‚ฌ +๋‹น์‹œ +199 +์ด๋ฏธ +์ฒ˜์Œ +์„ ์ˆ˜ +๋‚˜๋ผ +ํ•™์ƒ +##๋Œ€๋กœ +์ •์ฑ… +๋„ˆ๋ฌด +์—ฌ๊ธฐ +ํ‰๊ฐ€ +๋ณด์ด +์–ด๋–ป +์Šคํƒ€ +์ด์œ  +๋ชจ์Šต +์ž๊ธฐ +##000 +๊ฒฐ์ • +##์‹œ์•„ +##๋ผ๊ณ  +์šด์˜ +์‚ฌ๋ž‘ +์˜๋ฏธ +๊ทธ๋ž˜์„œ +๋ฐ”๋กœ +์ž๋ฆฌ +์ด์–ด +๋‚ด์šฉ +์ƒํ™œ +์„ค๋ช… +์ธ๊ฐ„ +๊ธˆ์œต +์—ฌ์„ฑ +์šด๋™ +๋Œ์•„ +16 +๋‹ค๋ฉด +์—†์ด +์‹œํ‚ค +์•„๋ฌด +##๋น„์Šค +ํ•œ๋‹ค๋Š” +##in +์ฃผ์žฅ +๊ทธ๋…€ +์ œํ’ˆ +์ƒˆ๋กœ์šด +ํ•ฉ๋‹ˆ๋‹ค +๋ชปํ•˜ +์ด๋Ÿฌ +##์–ด๋‚˜ +๋ฐฉ๋ฒ• +##or +์—ญ์‚ฌ +13 +๊ด€๊ณ„์ž +์ฐธ์—ฌ +๋ฐฉ์†ก +๊ตญ์ œ +##์šด๋ฐ +์ด๋‚  +##์œผ๋ฉฐ +์˜ค๋Š˜ +##an +ํ™˜๊ฒฝ +๊ฐ™์ด +์–ด์•ผ +์ตœ๊ณ  +๊ฐ๋… +์—…์ฒด +์ง€๋ฐฉ +์‚ฌ๊ฑด +14 +์ด๋ฆ„ +์„ ๊ฑฐ +๊ณ„์† +์‹œ๋Œ€ +์„œ๋น„์Šค +ํ˜„๋Œ€ +์˜ˆ์ • +17 +๋ถํ•œ +๊ฐ€์šด๋ฐ +๊ทœ๋ชจ +๊ธฐ์ค€ +๋ผ๋ฉฐ +์ถ”์ง„ +๊ธฐ๊ด€ +์ค‘์‹ฌ +๋ฐœ์ „ +์‚ผ์„ฑ +๊ตฌ์„ฑ +##๋ ˆ์ด +๋‚˜์˜ค +์กฐ์„  +๋ฌผ๋ก  +๋งŒํผ +๊ฒฝ์ฐฐ +๋ถ€๋ถ„ +์ƒ์‚ฐ +์–ด๋–ป๊ฒŒ +์—ฌ์ž +์‹œ๋ฏผ +๋”ฐ๋ฅด +์ง์ ‘ +๊ต์ˆ˜ +๋ฐœ์ƒ +์ƒํƒœ +๊ฐ€๊ฒฉ +๊ฒฝ์Ÿ +##๋žœ๋“œ +์†Œ๋ฆฌ +##๋จธ๋‹ˆ +25 +์•Œ๋ ค +๋ฐœํ‘œ +์ค€๋น„ +##์ธ๋‹ค +๊ด€์‹ฌ +๋Œ€ํšŒ +์–ด๋Š +์„ฑ์žฅ +์•„๋‹Œ +์ƒ๋Œ€ +์ œ๊ณต +ํฌํ•จ +##en +ํšŒ์žฅ +##๋ฆฌ์•„ +์–ด๋ ค +์ผ๋ฐ˜ +์—ญ์‹œ +๊ธฐ๋Œ€ +๊ทธ๋Ÿฐ๋ฐ +์ผ๋ถ€ +๋ณด์—ฌ +100 +์ „์ฒด +ํšจ๊ณผ +๋ถ€์‚ฐ +ํŒ๋งค +๊ฐœ์ธ +์„ฑ๊ณต +๊ตญํšŒ +๋˜๋Š” +๋ณ€ํ™” +์–˜๊ธฐ +์ž์œ  +๋กœ์„œ +๋Œ€๊ตฌ +์ด๋ผ๊ณ  +๋ถ„์„ +์ž‘ํ’ˆ +๋ฌด์—‡ +ํ”„๋กœ๊ทธ +๋–จ์–ด +์š”๊ตฌ +ํ™•์ธ +๋…ธ๋™ +์œ„์›ํšŒ +๋งˆ๋ จ +์ธํ„ฐ +๊ตฌ์กฐ +์ž์—ฐ +์ธ์ง€ +ใ…‹ใ…‹ใ…‹ +์•„์ง +์ˆ˜์ค€ +์ดํ•ด +ํ”„๋กœ๊ทธ๋žจ +์กด์žฌ +ํ–‰์‚ฌ +##๋ฒ„์ง€ +๊ทธ๋ ‡๊ฒŒ +๊ฐ€์กฑ +์˜ํ–ฅ +##ti +์€ํ–‰ +์ธ๋ฐ +50 +๋”์šฑ +๊ธฐ๊ฐ„ +๋”๋ผ +๊ณ ๊ฐ +๊ฒฝ์˜ +๋ชจ๋ฅด +์„ผํ„ฐ +์ „๊ตญ +์‹œ์„ค +๊ณต๋™ +์ด๊ฒƒ +๋Œ€ํ•ด์„œ +๋‹จ์ฒด +์นœ๊ตฌ +##ar +์ž๋™ +์–ผ๋งˆ +์ž…์žฅ +์‹ค์ œ +์œ„ํ•ด์„œ +๊ฒฐ๊ตญ +##al +๋ฐฉ์‹ +์ฆ๊ฐ€ +์†Œ๋น„ +์ •๋ง +๋‹ˆ๊นŒ +์กฐ์ง +์ตœ๋Œ€ +์ธ๊ฐ€ +์ƒํ’ˆ +๋Œ€๋ถ€ +198 +์ฃผ๋ฏผ +ํ•œํŽธ +์ง€์  +์ง€๋‚˜ +๋‹ฌ๋Ÿฌ +์ด๋ฃจ +๋…ธ๋ ฅ +##es +์—ญํ•  +๊ฑด์„ค +๊ฐ€์น˜ +##์ง„๋‹ค +๋ถ„์•ผ +์ œ๋„ +์–ผ๊ตด +ํ”ผํ•ด +ํ•ด๋„ +๋ผ๋ฉด +ํŠน๋ณ„ +21 +๋‚จ์ž +ํ™œ์šฉ +๊ฐ•์กฐ +ํ™•๋Œ€ +๋‚˜๊ฐ€ +์„ธ์ƒ +๋Œ€๋ถ€๋ถ„ +๋งˆ๋‹ค +์ฑ…์ž„ +ํšŒ์˜ +์ •์‹  +๊ณผํ•™ +๊ฑด๊ฐ• +24 +์ธ์‚ฌ +๋Œ€๋กœ +##๊ฑฐ๋ฆฌ +์œ ์ง€ +์‚ฌ๊ณ  +๋‹น์‹  +๊ธฐ๋Šฅ +์ „๋žต +๊ฐ•ํ™” +์„ ํƒ +๊ทธ๋ฃน +๋จผ์ € +๊ณต๊ฐœ +์–ด๋ฆฐ +์ž์ฒด 
+ํ˜„์‹ค +๋งŒ๋‚˜ +ํ•œ๋‹ค๊ณ  +์‹œ์ฆŒ +๋กœ๋ถ€ํ„ฐ +์ค‘์•™ +์‹œ์Šค +์ „๋ง +๋ชปํ–ˆ +์ฐพ์•„ +์‚ฌ์ง„ +##์ดํŠธ +##์ „ํžˆ +๋Š”๊ฐ€ +40 +๋ฐฐ์šฐ +์กฐ๊ธˆ +ํ‘œํ˜„ +์˜ํ•ด +ํ–‰์ • +22 +๋Šฅ๋ ฅ +์„๊นŒ +์ „์Ÿ +์•„๋ฒ„์ง€ +์œ„์น˜ +๋ฏธ๋ž˜ +์™ธ๊ตญ +์ฃผํƒ +๋ผ๋„ +๊ฒฝํ—˜ +์•ˆ์ „ +##๋‹ค๋Š” +์„œ๋กœ +์ธ์ • +๋ณ‘์› +ํ•ด๊ฒฐ +์Œ์•… +ํ•˜๋ฃจ +๊ธฐ์ž +์ธ์ฒœ +๋ฐ–์— +๊ฒŒ์ž„ +์˜ˆ์ƒ +๋น„๋กฏ +##์†Œ๋ฆฌ +๊ธฐ์กด +์—ฌํ–‰ +๋ณดํ—˜ +๊ณต๊ฐ„ +๋ชฉํ‘œ +๊ณต์‚ฌ +๊ด€๊ด‘ +23 +๋”ฐ๋ผ์„œ +๋“œ๋Ÿฌ +์ •์ƒ +์‹œ์Šคํ…œ +์˜ฌ๋ผ +๋ˆ„๊ตฌ +##์ด์Šค +์ฃผ์š” +๋ ค๊ณ  +๋ณด์ธ๋‹ค +ํ•ด์™ธ +๊ฐœ์„  +์ง์› +๋งˆ์„ +๊ณ„์•ฝ +๋จธ๋ฆฌ +๋‚˜์˜จ +์–ด๋”” +๊ธฐ๋ณธ +์˜คํ›„ +์ง€๋„ +๋ชปํ•œ +๊ณ ๋ ค +์ž๋™์ฐจ +์•„์š” +๊ฑฐ๋ž˜ +๋‚ด๋ ค +๊ฐœ์›” +ํ˜„์žฅ +๊ด‘์ฃผ +##he +์ด์ „ +๋ฐ˜๋Œ€ +์•„๋“ค +์—ด๋ฆฐ +##์—†์ด +์œ„๊ธฐ +##๋„ˆ์ง€ +๋ฐฉ๋ฌธ +์‹ค์‹œ +ํ•ด๋‹น +์•„ํŒŒ +์•„์„œ +์ ์šฉ +์ˆ˜์‚ฌ +๋งˆ์ง€ +##๋ฏผ๊ตญ +์ถ”๊ฐ€ +์ง€์† +์ œ์ž‘ +๊ทธ๋žจ +์„ค์น˜ +๊ฑฐ๋ฆฌ +๊ฑฐ์˜ +ํ”„๋ž‘ +์œผ๋กœ์จ +๋Œ€๋น„ +๋ฐœ๊ฒฌ +๊ธฐํšŒ +์œ„์›์žฅ +์œผ๋กœ์„œ +์ „ํ™” +##์œผ๋‚˜ +์˜ˆ์ˆ  +์œ ๋Ÿฝ +๋‹จ๊ณ„ +๋Œ€ํ•œ๋ฏผ๊ตญ +์•„์•ผ +๋งˆ์ง€๋ง‰ +ํ–‰๋™ +##์–ด๋‚œ +26 +์˜๊ตญ +๋ ค๋Š” +27 +์•„์ฃผ +๋„์›€ +์ ๊ทน +##๋งˆํŠธ +์—„๋งˆ +์ƒ์Šน +์žฅ๊ด€ +๊ณต๊ฒฉ +๋งค์šฐ +์ƒ๋‹น +๋“ค์–ด๊ฐ€ +์—…๋ฌด +์น˜๋ฃŒ +์˜ˆ์‚ฐ +28 +์ž‘์—… +๋ฆฌ๊ทธ +๋Š๋ƒ +์–ธ๋ก  +ํ†ตํ•ฉ +ํŒ๋‹จ +๊ณต์—ฐ +๋Œ€์‹  +๋™์‹œ +์ฐจ์ด +๋ฐ”๋žŒ +##์Šค๋กœ +##๋™์•ˆ +##๋‚˜๋ผ +ํ–‰์œ„ +์ง‘์ค‘ +๊ฐ์‚ฌ +์ด์‚ฌ +๋น„ํŒ +##์ž๋ฆฌ +๊ฒ€์ฐฐ +์ž‘๊ฐ€ +๊ฐ๊ฐ +๋ฐ”๋ผ +๋ถ€๋‹ด +์ž๋ฃŒ +๋‹ฌ๋ฆฌ +๋…์ผ +197 +๋ณดํ˜ธ +์•„ํŒŒํŠธ +๋ฏผ์ฃผ๋‹น +์ˆ˜์ต +์šฐ์„  +##ic +๋ณด๋‚ด +์Œ์‹ +์‹œ์ผœ +์„ ์ • +์„ ์ƒ +๊ณผ๊ฑฐ +์˜์‚ฌ +์œผ๋‚˜ +๋ธŒ๋žœ๋“œ +์ฐจ์ง€ +์กฐ์„ฑ +์–ด๋จธ๋‹ˆ +์˜๊ฒฌ +์Šค์Šค๋กœ +์ฒ˜๋ฆฌ +์‚ฌ์žฅ +์„ธ์š” +๋ฌธํ•™ +ํ™•๋ณด +๋งŒ๋“  +ํ˜‘๋ ฅ +์กฐ์ • +๋‚ด๋…„ +์ฃผ๋ณ€ +์ด๋ฅด +๋ฌด์Šจ +ํ”„๋ž‘์Šค +##is +๊ฒฐํ˜ผ +ํ‰๊ท  +๊ทธ๋•Œ +๊ทธ๋ƒฅ +์ง€๊ตฌ +ํ˜•ํƒœ +์ถœ์‹  +๋ถ€์กฑ +##๋””์–ด +๋‹จ์ง€ +๋น„์šฉ +๊ทธ๋ฆผ +์‚ฌ๋ฌด +##๋ฅธ๋‹ค +์ „๋ฌธ๊ฐ€ +๊ณต๊ธ‰ +ํ–‰๋ณต +๊ฐ€๊นŒ +์›€์ง +๋ชฉ์  +๋‚˜์™” +์•ˆ์ • +๊ด€ํ•œ +๋ณด๋„ +์ธํ„ฐ๋„ท +๋”๋ผ๋„ +์ฃผ์ธ +๊ธฐ์–ต +##๋ฆฌ์Šค +๊ณต๋ถ€ +์œ„ํ—˜ +์ „ํ†ต +์ธ์‹ +๋“œ๋ผ +##๋ฆฐ๋‹ค +์ž๊ธˆ +์ˆœ๊ฐ„ +์ „์ž +์ œ๋Œ€๋กœ +๋ชฉ์†Œ๋ฆฌ +##it +์‹œ๊ธฐ +์Šค๋Ÿฌ +๋ถ€๋ชจ +์ด์ต +์—๋„ˆ์ง€ +์ง„์งœ +29 +์ง€์ง€ +์˜์‹ +๊ธ€๋กœ +์–ด๋ ต +๋ˆ„๋ฆฌ +์ˆ˜์ถœ +๊ทผ๋ฐ +๋ถ„์œ„ +์ œ์‹œ +์–ธ์ œ +์ตœ์ดˆ +๋ณด์˜€ +๋ง์”€ +##์ „์ž +์Šค๋Ÿฝ +ํ†ต์‹  +##ํ•™๊ต +์ธ๊ธฐ +##๊ฐ€์ง€ +๋น„๊ต +๋ชจ๋ธ +๊ฑฐ๋“  +##at +๊ทœ์ • +๋ณต์ง€ +๋„์ž… +๋Œ€์ฑ… +##el +๋ฐ˜๋ฉด +๋‚˜์ด +๋งค์ถœ +๊ทธ๋Œ€๋กœ +๋ถ„์œ„๊ธฐ +๊ธ€๋กœ๋ฒŒ +๋””์ž +์•„๋ž˜ +##om +๋ถ„๋ช… +ํ™˜์ž +์ฃผ์‹ +์•ž์„œ +๊ตํ†ต +์ œ๊ธฐ +๋ฐฉ์•ˆ +์ง€์š” +๋Œ€์ „ +์ „์‹œ +60 +๋…ธ๋ž˜ +์‹ ๋ฌธ +์˜คํžˆ +์„ธ๊ธฐ +TV +๋“ฑ์žฅ +์ฐจ๋ก€ +์˜คํžˆ๋ ค +๊ทธ๋Ÿผ +์šฐ๋ ค +์˜์—… +๋ฐฉํ–ฅ +์†Œ๋น„์ž +์ด๋ค„ +์ข…ํ•ฉ +์˜ฌ๋ฆผ +์†Œ์„ค +์ฐธ๊ฐ€ +์–ผ๋งˆ๋‚˜ +๋Š๋‚Œ +์ฒดํ—˜ +์ƒ๋ช… +๊ฐ€๊ตฌ +์‹œํ–‰ +์ถฉ๋ถ„ +๊ณ ์ž +ํ•ต์‹ฌ +๋น„ํ•ด +์˜๋ฃŒ +LG +๊ณต์žฅ +์ธ๋ฌผ +๊ฐœ์ตœ +๋”ฐ๋ฅธ +๋กฏ๋ฐ +๊ธฐ๋‹ค +๋…ผ์˜ +์šฐ์Šน +๋†’์ด +##์ด์–ด +์Šค๋งˆํŠธ +196 +##๋ ˆ์Šค +์˜ˆ์š” +๊ฐ€์ง„ +์ œ์ฃผ +์ง€์‹ +์ผ์ • +์†Œ์† +์‹œ์ ˆ +์œ ๋ช… +์†Œ๊ฐœ +์žฅ์•  +ํ˜„์ƒ +์ธํ•ด +์ˆ˜๋„ +๊ฐ€์ • +์—ฐ๊ฒฐ +์ธ์ƒ +##์–ด์ง€ +์ถ•๊ตฌ +##์•„๋“ค +ํ”ผ๋ถ€ +ํ•œํ…Œ +์ด๋™ +2000 +๋‹ค๋ฅด +๊ด‘๊ณ  +์งˆ๋ฌธ +์ธ๊ตฌ +์„ค๋ฆฝ +์œผ๋กœ๋ถ€ํ„ฐ +๋ฏผ์กฑ +##ํ•œ๋‹ค +๋งˆ์น˜ +๊ฒ€์‚ฌ +์ „ํ˜€ +์˜ฌ๋ฆผํ”ฝ +์žฅ๊ธฐ +์กฐ๊ฑด +๊ฐœํ˜ +์‚ฌ์ƒ +์ค‘์†Œ +์ธ๋„ +##์ดํ„ฐ +๊ฑฐ๊ธฐ +์นด๋“œ +๊ธฐ์‚ฌ +์ „๊ธฐ +##ol +๋”๋‹ˆ +๊ณต์‹ +์ž–์•„ +##am +๊ฐ€์ ธ +์‹ ์ฒญ +31 +์•„๋ฆ„ +์ž˜๋ชป +์ •๋‹น +์ฐธ์„ +๋„ค์š” +๊ฑด๋ฌผ +๋Œ€ํ™” +2010 +##์•„๊ฐ€ +์ง„์ถœ +##๊ธฐ์—… +SK +์ฐจ๋Ÿ‰ +์—์„  +๊ฐœ๋… +๋„๋กœ +##tion +##st +๊ธฐํš +##ing +์˜ค์ „ +๋‹ด๋‹น +ํฌ๋ง +##ul +๋ถ€๋ฌธ +์‚ฌ๋ก€ +ํž˜๋“ค +##๋ผ์ธ +์ด๊ณณ +๊ฐ์ข… +์‹œํ—˜ +์—…๊ณ„ +์•„์นจ 
+์ฒด๊ณ„ +์—ฐ๊ธฐ +์Šคํฌ +์ฒญ์†Œ +์ด๊ฒŒ +์žฌ์ • +์ง€๋‚œ๋‹ฌ +์„ฑ๊ณผ +๋ถ€๋™ +๋ฐ”์ด +๋Œ€์ถœ +๋‚ด๋ถ€ +์œ ์น˜ +์ฃผ๋ชฉ +๊ทธ๋งŒ +์ •ํ™• +ํ•ฉ์˜ +์ถœ์‹œ +๊ณต๊ณต +๋Ÿฌ์‹œ์•„ +์ฒด์ œ +๋””์ž์ธ +##์ด์…˜ +์•„์‹œ์•„ +์ž๋ณธ +์•„๋งˆ +ํ›ˆ๋ จ +##BS +์žฌํŒ +ํŽ€๋“œ +ํ˜์‹  +##์ธํŠธ +์†Œ๋“ +๋‚˜๋ฌด +๋‚˜ํƒ€๋‚ฌ +ํ–ˆ์œผ๋ฉฐ +ํ˜์˜ +##๋ฌด์› +2012 +๊ฑฑ์ • +์—ฐํ•ฉ +๊ธฐ๋ฐ˜ +๋‹จ์ˆœ +##ur +๋‹ค๋‹ˆ +์‚ฌ๋ผ +๋…๋ฆฝ +์›์ธ +์‹œ์ฒญ +์„ธ๋Œ€ +์˜ํšŒ +๊ตํšŒ +์ˆ˜์š” +๋…ผ๋ž€ +์ˆ˜์ž… +ํ˜•์„ฑ +##์ดํฌ +์ฃผ๋กœ +๊ตฌ์ฒด +70 +๋น„์Šท +์ค€๋‹ค +๋“œ๋ผ๋งˆ +์˜ค๋ž˜ +๋Œ€ํ˜• +๋ฐ˜์‘ +์ธ์ œ +##์ด์ง€ +์Šน๋ฆฌ +๋ฒ„๋ฆฌ +##il +๋Œ€์‘ +ํ˜‘์˜ +๊ต์‚ฌ +##๋ฏ€๋กœ +๊ฐ€์ž… +##et +๋ถ€์ • +๋ฌด๋Œ€ +๋ณธ๋ถ€ +์‹ค์ œ๋กœ +์ฒญ์™€ +์ผ๋‹จ +์ฐจ์› +์Šคํฌ์ธ  +์ฒญ์™€๋Œ€ +๊ณ ๋ฏผ +๊ฐ์†Œ +์ž์› +์ž์‚ฐ +ํ‰ํ™” +๋‚จํŽธ +๋ชจ์–‘ +๋ถ€๋™์‚ฐ +์š”์ฆ˜ +์–ด๋ฆฐ์ด +์ง€์ • +##์–ด๋“ค +๊ฒฝ๊ธฐ๋„ +์Šต๋‹ˆ๊นŒ +์ด๋Œ +์ฃผ์ œ +์ „๋‹ฌ +์„ ๋ณด +๋Š๋ผ +๊ตฌ์ถ• +์กฐ์น˜ +์ดํ•˜ +๋“ฏ์ด +##์Šคํ„ฐ +์ถœ์—ฐ +๊ฐ€์Šด +๊ฑฐ๋“ ์š” +์–ธ์–ด +์—์š” +ํ•ญ๊ณต +๋ณด๊ณ  +์ˆ˜ํ–‰ +์˜์–ด +์—ฌ๋ฆ„ +๋‚˜์™€ +๋ถˆ๊ณผ +์ „ํ™˜ +๋ฐ˜์˜ +๊ตฐ์‚ฌ +๋ถ€๋ฅด +๋‚จ์„ฑ +์ฒญ๋…„ +๊ฒ€ํ†  +2011 +๋‹ค๋งŒ +๊ณต์› +๊ฐ์ • +์œผ๋‹ˆ +์„ ๋ฌผ +์ œ์™ธ +##์–ด๋‚ฌ +์ž์„ธ +๊ฒฝ๋‚จ +์›”๋“œ +์–ด๋ ค์šด +์ง€ํ•˜ +๋ณธ๊ฒฉ +๋งˆ์ฐฌ +๋งŒ๋“œ +์•„๋‚ด +2014 +ํ˜น์€ +์„ฑ์  +์ธ์ƒ +2008 +๊ธฐ๋… +๋งˆ์ฐฌ๊ฐ€์ง€ +๋“ฑ๋ก +##์ˆ˜๋ก +๋‚˜ํƒ€๋‚˜ +์„ ๋ฐœ +์ผ์ƒ +๊ทธ๋™์•ˆ +์ดˆ๋“ฑ +ํ•˜๋Š˜ +๋ถ„์–‘ +2009 +ํ•œ๋‹ค๋ฉด +์ฆ๊ถŒ +๊ธฐ์ดˆ +์‚ฌ๋ง +๋ฐ›์•„๋“ค +##์ž๊ธฐ +์ด๋ก  +๊ธˆ๋ฆฌ +##๋ผ์ด +ํšŒ๋ณต +๊ณ ์šฉ +์šธ์‚ฐ +์—ฌ์ „ํžˆ +๋– ๋‚˜ +์ผ์–ด๋‚˜ +ํ™•์‹ค +##nd +##๋””์˜ค +์ œํ•œ +๋„˜์–ด +๊ตญ์ • +๊ฐœ์ • +์†Œ์žฌ +์–ด๋„ +๋ง›์žˆ +ํˆฌํ‘œ +##๋ฐ˜๊ธฐ +์ง€์ผœ +์—ฌ๋ถ€ +35 +์‹œ์ผฐ +ํ”„๋ฆฌ +ํ•ด์š” +์ฃผ๋„ +์œผ๋ฉด์„œ +์ˆ˜์ƒ +์ง€๊ธ‰ +ํ•ด์„ +๋ผ์ด +ํŒŒ์•… +๋ถ„๊ธฐ +2013 +##์šฐ์Šค +๋Œ€์ค‘ +์ตœ์ข… +์‹ ๊ฒฝ +์ง‘๋‹จ +ํ˜„์ง€ +500 +์ •๊ถŒ +ํ•™์Šต +2015 +๋ถ€์ธ +์‹คํŒจ +ํ™œ์„ฑ +80 +๊ทœ์ œ +์šฐ์ˆ˜ +ํ†ต์ผ +๊ณ ๋“ฑ +๋…ธ์กฐ +##์Šคํฌ +##ch +์‹œํ‚จ +์‹œ๋„ +๋ณด์ˆ˜ +195 +๋Œ€์„  +์ž‘์šฉ +์ผ์œผ +##์šด๋“œ +์™ธ๊ตญ์ธ +๋ณด์žฅ +์ข…๊ต +๊ฑฐ์ณ +์‚ดํŽด +๊ตฌ๋งค +ํ˜‘ํšŒ +์ž์ฃผ +์š”์ฒญ +์˜ค๋ฅธ +์ถ”์ฒœ +์ด๋ฏธ์ง€ +์ฝ”์Šค +##๋“œ์‹œ +๋ฐ˜๋“œ์‹œ +๋น„์œจ +์„ธ๋ ฅ +์ •๋ฆฌ +๊ณต์ • +194 +๋ฐ•๊ทผ +ํ•™๋…„ +๊ทธ๊ฒŒ +๋Œ€์ฒด +์ž„๊ธˆ +๋งŒ์กฑ +์ „ํ–ˆ +ํฌ์ธํŠธ +of +ํ˜ผ์ž +๋™๋ฌผ +๊ฐˆ๋“ฑ +๊ฐ€๋ฅด +๊ณต๋ฌด์› +์ œ์•ˆ +ํŠน์ง• +์†๋„ +์ œ์กฐ +๋ฏผ๊ฐ„ +๋ฒ•์› +์˜ฌ๋ฆฌ +์˜์ง€ +์ •์˜ +์•ผ๊ตฌ +๊ถŒ๋ ฅ +๊ฑด์ถ• +๋ณดํ†ต +์—ฐ์† +๋‚จ๋ถ +##๋‹ค๊ณ  +๊ณตํ•ญ +์˜ค๋ฅด +์•ฝ์† +๋ฐ•๊ทผํ˜œ +๋ฏ€๋กœ +๊ณ ์š” +์˜์ƒ +๋ ค๋ฉด +์ง€๋ฐฐ +##ed +ํ™”์žฅ +์ผ์ž๋ฆฌ +์•„๋ฆ„๋‹ค +ํ˜‘์ƒ +๋ณด์œ  +์š”์†Œ +๊ทธ๋ฆฌ์Šค +๋…ธ์ธ +ํŠน์ • +๋ณ€ํ˜ธ +๋ถˆ๊ตฌ +์˜์—ญ +์ด๊ฑฐ +๊ฒจ์šธ +์Šคํƒ€์ผ +์™ธ๊ต +์‹ํ’ˆ +์„ฑ๊ฒฉ +๋‹ฌ๋ผ +ํ›จ์”ฌ +์ฒ ํ•™ +์„ ์–ธ +ํ•ญ์ƒ +##์€ํ–‰ +๋‹น์—ฐ +์กธ์—… +๋งค์ผ +000 +์ตœ์†Œ +์ถ•์ œ +ํ†ตํ•œ +ํšŒ์› +๋‚ด๋†“ +์•Œ๋ ค์กŒ +๋„์ „ +๋ฐฉ์นจ +##ํŽ˜์ธ +2007 +๋ฏธ์ˆ  +์›์น™ +##์Šคํƒ€ +์žฌ๋ฏธ +ํ˜ธํ…” +์—ฐ๊ตฌ์› +์ŠคํŠธ +๋‚˜์„œ +์ ˆ์ฐจ +๊ฐ•๋ ฅ +์ฃฝ์Œ +๊ป˜์„œ +์ง„์ • +๋Œ€ํ•™๊ต +ํ˜๋ช… +๋ถˆ์•ˆ +์‹ฌ๋ฆฌ +๊ฒฝ๋ถ +##์–ด์ง„ +๋ฐ”๋‹ค +์ „ํ•ด +๋‘˜๋Ÿฌ +์œ ํ†ต +ํŠน์„ฑ +์ €๋… +์€๋ฐ +ํ•˜๋ฝ +๋ฌผ์งˆ +##ใ…‹ใ…‹ +ํ† ๋ก  +๋ฐ”๋ผ๋ณด +ํ•˜์ด +์„ค๊ณ„ +ํ™๋ณด +์ค‘์†Œ๊ธฐ์—… +์ˆ˜์ˆ  +์†Œํ†ต +์‚ฌํ•ญ +๋งŒ์•ฝ +๋น ์ ธ +๋งˆ๋ฆฌ +๋ถˆ๋Ÿฌ +##20 +๊ทธ๋Ÿฌ๋‹ˆ๊นŒ +##์Šค์ฝ” +##๋ฆฌ์นด +##๋‹ค๊ฐ€ +๊ฐ‘์ž๊ธฐ +์‹œ๊ฐ +์ด๋ฆฌ +๋‚˜๋ˆ„ +๊ต‰์žฅ +##BC +์กฐ์ฐจ +๋‰ด์Šค +๋ฐ”ํƒ• +์ฐฝ์กฐ +๋ถˆ๋ฆฌ +์†Œ์œ  +์ ˆ๋Œ€ +##ํ”„ํŠธ +๊ธฐ๋ถ„ +์ ‘๊ทผ +์—„์ฒญ +์Šค๋Ÿฌ์šด +์ž์น˜ +์‹ค์  +##le +๋ฒ„์Šค +##๋Ÿฌ์Šค +๋Œ€๋‹ต +๋”๋ถˆ +์ธ์ˆ˜ +์‚ฌํƒœ +์›์žฅ +์—ด๋ฆฌ +์ฒด์œก +##์š”์ผ +์ธก๋ฉด +ํ•˜๋‚˜๋‹˜ +์‹ ๊ณ  +๊ณ ๊ฐœ +๋งˆ์ผ€ +๊ทผ๋กœ +์ดˆ๊ธฐ +๊ฒฝ์Ÿ๋ ฅ +์—ฌ์•ผ +ํšŒ๋‹ด +๊ทผ๋ฌด +๋ฌด์—ญ +์œ ๋ฆฌ +๋ด‰์‚ฌ +์–ธ๊ธ‰ +์˜ํ˜น +์žฌ์‚ฐ 
+์ง์žฅ +๊ทธ๋ž˜๋„ +๋‚ด๋ฆฌ +์ฒญ์†Œ๋…„ +์‚ผ์„ฑ์ „์ž +๋ถˆ๋ฒ• +์‚ฌ์ „ +์ˆ˜์› +##๋‹ˆ์•„ +์ฃผ์˜ +##od +์ฐจ๋ณ„ +ํ”„๋กœ์  +ํƒœ๋„ +์›๋ž˜ +ํ†ต๊ณผ +์•Œ์•„ +์žฅ์†Œ +๋นจ๋ฆฌ +์œผ๋ ค +๊ณจํ”„ +##as +์ฐฝ์—… +์œ ์ผ +์†Œ์‹ +๋Œ์•„๊ฐ€ +2016 +์ทจ์—… +์ž๋…€ +์ˆ˜์—… +##๋‚˜๋ฌด +##re +๊ณ ํ†ต +##ou +์ดฌ์˜ +์—ฌ๋ก  +์•„ํ”„ +๊ฑฐ๋ถ€ +๊ทผ๊ฑฐ +ํ”„๋กœ์ ํŠธ +๋ง๋ถ™ +๊ต‰์žฅํžˆ +์˜ฌ๋ž +45 +๋งˆ์ผ€ํŒ… +๊ฐ•์› +๋ฒ”์ฃ„ +๊ตฌ์† +๊ณ„๊ธฐ +๊ฐ€๋ฆฌ +์ƒ๊ธฐ +์ค‘๋‹จ +์ธ๋ ฅ +์ดํƒˆ +์ž‘๋…„ +์ฃผ๋ฌธ +์•ฝ๊ฐ„ +์ง€ํ‚ค +๋œ๋‹ค๋Š” +๋ฉ๋‹ˆ๋‹ค +์™„์ „ํžˆ +๋ณ€๊ฒฝ +๋Š”๋ฐ์š” +์‹ ๋ขฐ +36 +์ถœ๋ฐœ +ํฌ๊ธฐ +๋ฒ•์ธ +the +##์˜์› +์™„์„ฑ +์ผ๊นŒ +ํƒ€์ด +์กฐํ•ฉ +for +์˜ํ•œ +์‹ฌ๊ฐ +์ œ์ผ +์ธ๊ทผ +in +๋ˆ„๊ฐ€ +์ž ์‹œ +ํ™•์ • +##๋‹ˆ์Šค +33 +์‹ฌ์‚ฌ +์‡ผํ•‘ +๋‹ฌ์„ฑ +##์ด๋“œ +๋“œ๋ฆฌ +๊ณผ์ œ +ํ‘œ์ • +๋ถ€๋ถ€ +300 +##ation +์ปดํ“จ +์ด๋ž€ +ํšจ์œจ +์ด๋•Œ +์ „ํˆฌ +๋ณธ๋‹ค +๋Œ€ํ•˜ +1000 +์ œ์ถœ +๋‚˜์˜จ๋‹ค +์™ธ๋ถ€ +์‹œ๋ฆฌ +์„ธ์›” +๊น€์ • +ํ–ฅํ›„ +๋™์ผ +##์‹ฌํžˆ +์•ˆํƒ€ +๋ฐ”๊พธ +๋ฐœ์–ธ +๋“ฑ๊ธ‰ +๊ต๊ณผ +32 +๋‹ค๋ฆฌ +๋ธŒ๋ผ +๊ธˆ์•ก +ํ™”ํ•™ +์—ฐ๊ธˆ +๊ธˆ์ง€ +##๋ฌด๋ฆฌ +์ปคํ”ผ +๋ฐ”๋€Œ +์ง€์‚ฌ +ํ–ˆ์œผ๋‚˜ +๋น„์ค‘ +##ใ… ใ…  +๊ฐ•ํ•œ +ํ˜•์‹ +๋‚˜์„ฐ +์˜จ๋ผ์ธ +๋…ธ๋™์ž +๋ณธ์ธ +ํ…”๋ ˆ +๋‚˜๋จธ +๋‹ฌ๋ ค +ํ–ฅํ•ด +193 +์†Œ์†ก +์šฐ์ฃผ +๋Œ€์‚ฌ +๋‚˜๋จธ์ง€ +๋ฒ•๋ฅ  +์œผ๋‹ˆ๊นŒ +ํ•ด์–‘ +์ˆ˜์šฉ +์ „ํ˜• +์‚ด์•„ +์‹ ์šฉ +์‹ค์ฒœ +๊ฐ€๋Ÿ‰ +์žฅ๋ฉด +์ฃผ๊ฐ€ +์ €ํฌ +์ž„๋Œ€ +์š”๋ฆฌ +ํ—Œ๋ฒ• +##์„œ๊ด€ +์ปดํ“จํ„ฐ +์˜ค๋žœ +km +๋ณ€ํ˜ธ์‚ฌ +์ž–์•„์š” +๊ฐ€๋“ +๋ฐฐ์น˜ +์•ž๋‘ +๋งˆ์‹œ +๋Š๊ปด +์ „๋‚จ +๋„์ฐฉ +๋ฒ ์ด +์ƒ๋‹ด +์˜ˆ๋ฐฉ +๋ฐํ˜€ +2006 +์ฒ ๋„ +๊ณ„์—ด +##๋‹ค๋ณด +##ent +์‹œํ‚ฌ +๋ฐ์ดํ„ฐ +์žฌ๋‹จ +๋‹น๊ตญ +๋ˆˆ๋ฌผ +๊ทธ๋žฌ +์ „๋ ฅ +ํ• ์ธ +์Šค๋งˆํŠธํฐ +##๊ฑฐ์šด +90 +์˜๋ฌด +์ฃผ์ธ๊ณต +๋งˆ๋ฌด๋ฆฌ +์—ฌ๋Ÿฌ๋ถ„ +##๋ฒคํŠธ +ํ˜œํƒ +๋‹น์„  +๋ฏธ์น˜ +๋ ˆ์ด +๋กœ์šด +๋ฆฌ๋” +์นดํŽ˜ +์ด๋ฒคํŠธ +๋ฒ„๋ ธ +๋งค๋…„ +์™„์ „ +์ˆ˜ํ•™ +์ถœ์ „ +๊ถŒ๋ฆฌ +์ƒ์ง• +์ƒ์ƒ +๊ฐ•๋‚จ +๋…์ž +์ž๊ฒฉ +##ํ…์ธ  +ํ”Œ๋ ˆ์ด +์ค‘๊ฐ„ +์‹œ๋ฆฌ์ฆˆ +์—ฐ๊ตฌ์†Œ +5000 +๋ฐ•์‚ฌ +๋ถ€์—ฌ +๋ฐฐ๊ฒฝ +๋–จ์–ด์ง€ +ํƒœ์–‘ +์ฐฝ์ถœ +์ง€์ˆ˜ +๊ทธ๋Ÿด +์—ด์‹ฌํžˆ +๊ธฐ๋‹ค๋ฆฌ +์ถฉ๋ถ„ํžˆ +์ „๋ฐ˜ +ํŽผ์ณ +์˜ํ•˜ +๋ถ€์ƒ +๋“ค์—ฌ +๋ฏธ๋””์–ด +๋งค์žฅ +##ac +๋ผ์ธ +๊ทธ๋Ÿฌ๋ฉด +ํ†ตํ™” +๋†€๋ผ +์š”์ธ +ํˆฌ์ž… +์ „์šฉ +ํŠน์ˆ˜ +์ผ€์ด +34 +##ํ•™์ƒ +์ž‘์„ฑ +์ธํ„ฐ๋ทฐ +์ด๋ฅธ +์‹์‚ฌ +์•„๋„ +์ข…๋ฅ˜ +์ง์—… +์˜์žฅ +##๊ณ ๊ธฐ +ํ–ฅ์ƒ +์ฒด๊ฒฐ +๊ฐ€์ˆ˜ +ํ‰์†Œ +##๋ผ๋„ +๊ทธ๋ฆฐ +38 +๊ฒ๋‹ˆ๋‹ค +ํ• ์ง€ +์›”๋“œ์ปต +๋™์•„ +์ž๋Š” +๋ถ€์žฅ +์„œ์šธ์‹œ +๋ฉ”์‹œ +์ƒํ˜ธ +์‹คํ—˜ +ํ•™๋ถ€ +์—ฐ์žฅ +์„์ง€ +์œ„ํ•˜ +์ง‘ํ–‰ +์ฑ„์šฉ +๋ง๋ถ™์˜€ +๋…ผ๋ฆฌ +์–ด๋ ค์›€ +์ธํ•œ +์‹ ๊ทœ +ํ†ตํ•ด์„œ +##๋ž€๋“œ +์•„๋ฌด๋ฆฌ +KT +์—ฐ๋Œ€ +ํ”๋“ค +๋ถ€๋Œ€ +๊ฑธ์ณ +๋น ๋ฅด +์ „ํ•˜ +๋ญ”๊ฐ€ +์ด์–ด์ง€ +๋‰ด์š• +์•„๋น  +2017 +๋‚˜์„  +๊ธ์ • +##ํ™”์  +๋„คํŠธ +๋™์˜ +ํ™•์‚ฐ +๊ธฐ๊ตฌ +##us +๋ฏธ๋ฆฌ +๊ณต์œ  +์„ค์ • +๋ฒ—์–ด๋‚˜ +์‹œ์  +์ธ๊ถŒ +๊ตฌ์ž… +๊ตญ๋ฆฝ +๊ตญ๋ฐฉ +๋ฌด๊ธฐ +๋‹นํ•˜ +์›ƒ์Œ +์—ฐ์ถœ +๋ณตํ•ฉ +์ œ๊ตญ +๊ธฐ๊ณ„ +##๊ฐ„๋‹ค +2005 +๊ตฌ๋‚˜ +ํ•œ๋‚˜๋ผ +ํ•œ๋‚˜๋ผ๋‹น +๊ตฌ๋ถ„ +2018 +๊ดœ์ฐฎ +์ฝ˜ํ…์ธ  +์‚ฌ์ดํŠธ +๋ฐ˜๋ณต +์‚ฌ์ • +์ง„๋‹จ +๊พธ์ค€ +์ˆ˜๋‹จ +์ข…๋ชฉ +๋ณด์ƒ +์ˆ˜์„ +๊ธฐ์—ฌ +๋ฌผ๊ฑด +ํˆฌ์ž์ž +ํŒจ์…˜ +๋ฌธ์žฌ +๋งค๋ ฅ +ํญ๋ ฅ +##๋‚œ๋‹ค +์‚ฌ๊ณผ +์•Œ๋ฆฌ +ํ•œ๊ณ„ +์•„๋‹ +๊ตญํšŒ์˜์› +๋งž์ถฐ +์•ผ๋‹น +ํ† ์ง€ +์‹คํ˜„ +๊ธฐ์šธ +๋ถˆํŽธ +์ฒ ์ € +ํ๋ฆ„ +##ro +์œก์„ฑ +๊ตฌ์—ญ +๋‚˜์ค‘ +##ag +์ˆœ์œ„ +ํŒ๊ฒฐ +ํœด๋Œ€ +์—ฐ๊ณ„ +๋ถ€์ง€ +์œ„๋ฐ˜ +์ˆ˜๋งŽ +๊ทธ๊ฑฐ +์ ์  +์ถœํŒ +์„ ์ง„ +์•„๋ฆ„๋‹ค์šด +##๋ฐ”์ผ +๋„์™€ +์˜ˆ๋น„ +๊ณ ๋“ฑํ•™๊ต +๋ช…๋ น +##์ผ๋ณด +๋‚˜๊ฐˆ +์˜ฌ๋ ค +๋‹ค๋ฉฐ +ํ‘œ์‹œ +๋ชจ๋ฐ”์ผ +37 +๊ท ํ˜• +ํ™•์žฅ +์‚ดํŽด๋ณด +๊ตฌ์š” +๊ตญํ†  +์ฑ„๊ถŒ +์žฅ์น˜ +๋ณด๊ณ ์„œ +๋˜‘๊ฐ™ +์ถ”์ • +##์›Œํฌ +์ž๋ž‘ +์ดํƒˆ๋ฆฌ์•„ +์™€์„œ +2004 +๋ถ„๋ฅ˜ +ํ•™์› +๋†์—… +๊ฐ€์„ +์ฃผ๋ง +๋น„ํ–‰ +์ง€์ƒ +์Šคํ†  +์—ฐ์ฃผ +๊ฐ–์ถ” +๊ต์ฒด 
+๋‹จ์œ„ +์ ์ ˆ +๋Œ€๋ณ€ +์ƒํƒœ +##ula +๊ณ ๊ธฐ +๋ฌด๋ฃŒ +๋ฉด์  +๋กœ๋งˆ +๊ทน๋ณต +๊ต์œก์ฒญ +๋น„๋ฐ€ +๊ฒฐ์ฝ” +ํ†ต์ œ +์ฆ๊ฑฐ +form +์ž…์ฃผ +์•„๋‹Œ๊ฐ€ +๋“ ์ง€ +์˜๋„ +์†Œ์žฅ +์ง€์‹œ +##๋ฐ˜๋„ +๋ฌด๋ฆฌ +์ฒญ๊ตฌ +๋œ๋‹ค๊ณ  +์˜€์œผ๋ฉฐ +์œ„ํ˜‘ +ํ—ˆ์šฉ +๋งˆ์ € +์ด์„  +๋Š”๋‹ค๋Š” +์ด๋ช… +๋ชจ์•„ +๊ฐ€๊นŒ์šด +๊ทธ๊ณณ +๊ณ ์–‘ +๊ณ„์‚ฐ +์‹œ์ธ +๋””์ง€ +์ค„์ด +55 +์ถ”๊ตฌ +์˜์‹ฌ +๊ธฐ๋… +๋ฐ๋ท” +์ฃผ์ฒด +๋ฒ”์œ„ +ํ• ๋จธ๋‹ˆ +์‹œ์„  +ํŽธ์ง€ +๋ด์•ผ +๋ณด๊ฑด +ํฌ๊ธฐ +๊ฐ„๋‹จ +2002 +์ •๋น„ +๊ฒฝํ–ฅ +##ad +๋ถˆ๊ฐ€ +์ˆ˜์ • +์ƒ๋‹นํžˆ +์—์Šค +๋ฌผ๋ฆฌ +formula +๋ฐฑํ™”์  +๊ด€๊ฐ +๋…ธ์„  +๋ฐœํ–‰ +and +๋ฉ”์‹œ์ง€ +๋งˆ์ด +์šด์šฉ +๋ชจ์ง‘ +##๋ฅดํฌ +์ถฉ๊ฒฉ +๋‹น์žฅ +๋ฏผ์ฃผ์ฃผ์˜ +๊ณ ์† +๋ˆ„๊ตฐ +๊ธด์žฅ +##ํŽ˜์ด์ง€ +์ง„๋ณด +๋™๊ธฐ +ํ–ˆ์—ˆ +๊ณผ์—ฐ +๋ฒ ํŠธ +ํ™ˆํŽ˜์ด์ง€ +์•„๋™ +๊ธฐ์› +๊ฐ€๊นŒ์ด +์ •๊ทœ +์ ˆ๋ฐ˜ +์—ฐ๊ฐ„ +The +##id +์ธํ•˜ +์žฅ๋น„ +๊ฐ•์˜ +์šด์ „ +๋‘๋ ค +์˜คํ”ˆ +๋ถ„๋ฆฌ +์ด๋‹ฌ +๋””์ง€ํ„ธ +๋ฟŒ๋ฆฌ +๋‹ค์„ฏ +์ž„๋ช… +์ „๊ฐœ +์ „๋…„ +##ig +๋ฐ˜๋ฐœ +๋– ์˜ฌ +์˜ˆ์ „ +##๊ฐ€๋ฝ +##ํ™˜๊ฒฝ +ํ™œ์•ฝ +์‹ค์žฅ +๋ชจ์ž„ +๋ง›์ง‘ +์›์ž +๋ฉ”์ด +์›€์ง์ด +์ „๋ถ€ +ํŽธ์˜ +๊ทธ๊ฑด +๋ฐ”๋‹ฅ +์•„๋ฅด +์ง„์‹ค +์ ๊ฒ€ +์„ธํŠธ +๋ฐฉ์ง€ +์›๋‚ด +ํ”ผํ•ด์ž +ํ•œ๋ฐ˜๋„ +์•„๊ธฐ +191 +39 +๋ชจ์—ฌ +๊ณผ์žฅ +์ธ๋ฏผ +๋Š˜์–ด๋‚˜ +๋“ค์–ด๊ฐ” +๋„คํŠธ์›Œํฌ +๋ชจ๋ฅธ๋‹ค +##๋‹ค๋ฉด +์š”๊ธˆ +์ž๊ทน +๊ธฐ๋…๊ต +ํ•ด์†Œ +์ œ๊ฑฐ +48 +๊ทผ๋Œ€ +๋”ฐ๋œป +์ƒˆ๋กญ +์†Œํ”„ํŠธ +๊ณตํ™” +##ec +๊ตฌ๊ฐ„ +์™„ํ™” +์„ธ๊ธˆ +๊ณ ๊ธ‰ +๊ต๋ฅ˜ +์ž์œจ +์ถœ๋งˆ +์œ ํ˜• +๊ฒฐํ•ฉ +ใ…Žใ…Ž +๊ธฐํƒ€ +์ˆ˜์ˆ˜ +์ผ์ œ +์œ ์‚ฌ +๋™์ƒ +๋งˆ์นจ +์ง€์  +์ƒ๊ธด +์ž์‹ +IT +ํ์ง€ +์ธ์žฌ +KBS +๋‹ค์†Œ +##๋Ÿผํ”„ +์ฆ๊ธฐ +๊ถ๊ธˆ +๋Œ๋ ค +์ง€ํœ˜ +์ด์žฅ +์˜ˆ์ˆ˜ +ํฌ๋ฆฌ์Šค +๋ถ€์ฒ˜ +์•ˆ๋ณด +๊ฒฐ๋ก  +๋‹จ์–ด +๋ณด์กฐ +๋ฒŒ์ด +2003 +ํ•ฉ๋ฆฌ +์ด๊ฑด +๊ฐ€๋กœ +##๋ฉ”๋ผ +ํ˜๋Ÿฌ +๊ณต์•ฝ +๋Œ€๋‹จ +๋ฌธ์ž +##em +65 +๊ฐ•ํ•˜ +๋Œ€์šฐ +๋‹ค์ด์–ด +๊ณต๊ธฐ +๋น„๋‚œ +๋ฏธ์‚ฌ +42 +๋ณ„๋กœ +๊ฐœ๊ตญ +์™œ๋ƒ +๋ฒ ํŠธ๋‚จ +์ •๊ธฐ +๋‹ค์ด +๊ณ ํ–ฅ +##์‚ฐ๋ฌผ +ํ•™์ž +ํ›„๋ฐ˜ +ํ†ตํ•˜ +์–ด๊นจ +๊ฒฝ๊ณ„ +##๋น„์•„ +##un +์ตœ์ € +๋ฐํžˆ +๋ฉ”๋‰ด +์‹ค์งˆ +์ง€๋„์ž +์ฐฝ์› +์ƒ๊ด€ +์บ๋ฆญ +์‹ค๋ ฅ +๋ช…์˜ˆ +์˜ค๋Š˜๋‚  +์˜ˆ์ธก +์‹ธ์›€ +๋๋‚˜ +๋“ค์–ด์˜ค +์ฃผ์ผ +##๋Ÿฌ์šด +์œผ๋ฏ€๋กœ +ํˆฌ์Ÿ +์ •์ฒด +์–ด์ฉŒ +192 +์ง€์ž +๋‚˜์•„๊ฐ€ +๊ณต๋™์ฒด +์›€์ง์ž„ +##ot +์–ด๋–  +##์–ด๋‚  +๊ฐ•์ œ +์ธ์ฆ +๊ฒฝ์ฃผ +์„ธ์šฐ +์ƒ์ฒ˜ +๊ด€์  +๋ฌด์‹œ +์บ๋ฆญํ„ฐ +๋ถ€ํ’ˆ +MBC +์ฃผ์ฐจ +๋‚ด์ผ +์žฅ์  +๋‘˜์งธ +๊ทธ๊ฑธ +๊ทผ๋ณธ +์ฒœ๋งŒ +๊ด€๊ด‘๊ฐ +๋ฒˆํ˜ธ +๋น„์ƒ +๊ณต๋‹จ +์—ฐ์Šต +43 +41 +์ด๋ž˜ +๊น€์˜ +์•จ๋ฒ” +๊ตฐ๋Œ€ +๋™๋ฃŒ +์งˆํ™˜ +##ow +์ž์ „ +ํฌ์ƒ +์–ด์šธ +์ด๋‚ด +ํด๋Ÿฝ +์—๊ฒŒ์„œ +ํž˜๋“  +๋น ๋ฅธ +๋‚˜์˜ฌ +##๋ผ์Šค +์ตœ์„  +ํ•ด์กŒ +๋‚˜์š” +๋ถ€๋“œ +๋งŒ๋‚œ +๋ฉด์€ +๊ฑฐ๋“ญ +๋”ฐ๋กœ +##๋ง๋กœ +ํŠธ๋Ÿผํ”„ +๋ฒŒ์จ +์ ์ˆ˜ +์‘๋‹ต +์ค‘ํ•™๊ต +75 +๊ทธ๋ ค +์™„๋ฒฝ +๋Œ์–ด +๋•๋ถ„ +๊ตญ์žฅ +์–ด์ฐŒ +์ด์„ฑ +์‹๋‹น +๋ถ€ํƒ +์ˆ˜๋ฆฝ +##๋ž€๋‹ค +ํŒŒ๊ดด +ํˆฌ์ˆ˜ +๊ณต์ฒœ +์•Œ๋ ค์ ธ +##ter +๊ฑฐ๋Œ€ +๋ณต์žก +์„ธ์›Œ +๊ฑฐ์ฃผ +๊ณต๊ฐ +๋ณ„๋„ +ํˆฌ์–ด +๊ธฐ๋ถ€ +ํƒ„์ƒ +์—ญ๋Ÿ‰ +๋ฉค๋ฒ„ +์ฝ”๋ฆฌ์•„ +์ฒ˜๋ฒŒ +๋ธ”๋ž™ +๋„์„œ๊ด€ +์ด์™ธ +์ด๊ธฐ +๋ฐ•๋ฌผ +ํ•œ๋ฐ +์žฌ๋ฃŒ +๊ฒŒ๋‹ค๊ฐ€ +ํ• ๊นŒ +์Šˆํผ +์˜ฌ๋ ธ +##๋ฅดํŠธ +ํ‘œ์ค€ +๋ˆ„๊ตฐ๊ฐ€ +์ •ํ•ด +44 +ํ’ˆ์งˆ +๋™์› +##์•„์ง€ +##๋ฆฌ์˜ค +์ฑ„๋„ +๋จธ๋ฌผ +ํšŒ๊ฒฌ +์†ํ•ด +์ถฉ๋‚จ +๋ฐฐ์ถœ +๋‹จ์ผ +์‚ฌ์—…์ž +์ƒˆ๋ฒฝ +์ด๋ช…๋ฐ• +์ง€์ž์ฒด +๋งค๊ฐ +๋ฐœ๋‹ฌ +ํ™ฉ์ œ +์‹œ์œ„ +์ŠคํŽ˜์ธ +์ž…ํ•™ +ํ•„์ˆ˜ +๋ณด๋ƒˆ +๋‹ค๋ฃจ +์Šคํ…Œ +##์–ด์ ธ +1990 +์ „๋ฉด +์• ํ”Œ +ํ‰์ƒ +##๋ฉ”๋‹ฌ +์ง€๋ถ„ +##์‚ฌ์ž +๋ฌธ์žฌ์ธ +##๊ตฌ์š” +##๋ฆฌํŠธ +ํŒŒ๋ฆฌ +์กฐ์šฉ +์•„๋‹™ +์ œ๋ชฉ +์Šน๋ถ€ +๋“ค๋ ค +์˜ฌ๋ผ๊ฐ€ +์–ด์ œ +46 +์—ฐ๊ทน +์งํ›„ +๋‚ด์„ธ +์ด์›ƒ +์ทจ์ž„ +๊ณต์‚ฐ +ํ˜‘์•ฝ +์ฃผ์œ„ +์•ˆ๋‚ด +##์ค€๋‹ค +๋ฌผ๋Ÿฌ +์‹ค์ˆ˜ +๋‹ค์ˆ˜ +๋ฐ•๋ฌผ๊ด€ +์นด๋ฉ”๋ผ +3000 +๊ฐœํŽธ +##๊ณต์‚ฌ +๊ตญ๋ฌด +์ง‘์•ˆ +๊ทธ๋งŒํผ +๋„๋• +๋Œ€์•ˆ +์ž„์‹œ 
+๊ฐœ์ž… +๊ฑฐ์ง“ +๋Œ€๋ฆฌ +๊ฐ์•ˆ +๊ฑด๋„ˆ +400 +๊พธ์ค€ํžˆ +๊ฐ€์Šค +๋ถ„๋ช…ํžˆ +์„ ๊ณ  +์ฃผ๋…„ +์›๋ฆฌ +๋ฐœ๊ตด +์นœํ™˜๊ฒฝ +๊ทผ๋กœ์ž +47 +ํ™ˆ๋Ÿฐ +ํ•œํ™” +์†Œ๋…€ +๋‚ด๋ ธ +ํ•œ๊ตญ์ธ +๊ด€๋žŒ +๋›ฐ์–ด +##um +์˜ฎ๊ฒจ +๊ฐ–์ถ˜ +๋ˆˆ๊ธธ +ํ†ต๊ณ„ +์ด์ž +์œ ๋„ +์ฒ˜๋ถ„ +๋™๋„ค +2001 +##์˜ค๋ฅด +์ง„์ž… +์–‘์„ฑ +์ž์ „๊ฑฐ +##os +์–ธ์ œ๋‚˜ +##ํ•˜๋ฉด +๊ณต๋ชจ +๊ตฌ๋‹จ +๊ตฐ์š” +ํ›„์› +์ €์ถ• +##ํšŒ์žฅ +๋ฏธ์†Œ +๋น„๋ก +๋Œ€๋ฒ• +ํ†ต์ƒ +์‚ฌ์‹ค์ƒ +์ž‘์ „ +๋งž์ถ” +์ข‹์•„ +๊ฐ๊ฐ +์„์œ  +์•„์ด๋””์–ด +์ฑ”ํ”ผ +๋น„์ฆˆ +์ผ์ข… +์˜์–‘ +ํ”„๋ฆฌ๋ฏธ +๋ฒ•์•ˆ +๋ฐœํœ˜ +์Šน์ธ +52 +์ง„๋‹ค +๋ถ€์ฑ„ +ํ˜•์‚ฌ +์—ฐ๋ฝ +ํŽธ์„ฑ +49 +##๋ฆฌ์–ด +์ •์› +๋Œ€๊ธฐ +๋ผ์•ผ +ํ™”๋ ค +๊ถŒํ•œ +์œ ๊ฐ€ +##์‹œ์˜ค +๋ณด์ฆ +ํ˜„๋Œ€์ฐจ +์„ฑ๋ถ„ +๊ฐœ๋ฐฉ +์•„๋ฌด๋Ÿฐ +์กฐ๊ธฐ +๊นŠ์ด +์–ธ๋‹ˆ +์„œ์šธ๋Œ€ +๋ฉ€๋ฆฌ +์ง€๋‚ด +๋”๋ผ๊ณ  +๋–จ์–ด์ง„ +์ถœ๋ฒ” +์ €์ž +์งˆ์„œ +์ธ๋ฅ˜ +๋–จ์–ด์ ธ +๊ณ ์„œ +๋ฐฉ์–ด +ํ‚ค์šฐ +ํ•™๋ถ€๋ชจ +##ay +๊ณตํฌ +๋ฐ”๋ฅด +์ž๋ฉด +์กฐ๊ฐ +๋ฏธ๊ตฐ +์งœ๋ฆฌ +##์„ผํŠธ +์šดํ–‰ +##TA +๋‹ค๋…€ +๋‚˜๋ˆ  +๊ธฐ๋„ +##ํ”„๋ผ +190 +์•„์šธ +์ด‰๊ตฌ +์ด์Šน +์ˆ˜๋Šฅ +๋งค์ฒด +๋ฒ„๋ฆฐ +ํญ๋ฐœ +๋ถˆ๋งŒ +๋ƒ„์ƒˆ +์ง€๋‹Œ +51 +##์•„๋ฒ„์ง€ +์•„๊นŒ +##์ง€์–ด +CE +์กฐ์ ˆ +๊ฑด๋ฆฝ +์‹ ์ฒด +ํ•˜๊ณ  +##๋กœ์Šค +๊ฐ€์กŒ +์ˆ˜๋น„ +##๋ฎค๋‹ˆ +๋ถ€์ž +๊ณ ๊ต +์†Œ๋…„ +์ปค๋ฎค๋‹ˆ +๋„ˆ๋ฌด๋‚˜ +ํŒ€์žฅ +๊ทธ์ € +ํ—ˆ๋ฆฌ +์ „๊ณต +์‚ฌ๋‚ด +ํฌํ•ญ +์—ฌ์œ  +๊ทธ๋‚  +์บ ํ”„ +์ƒ๊ฒผ +ํ—ˆ๊ฐ€ +๋ฐ์ด +์˜ค๋ฆฌ +์ข…์ด +์ƒ๊ฒจ +์œค๋ฆฌ +์ดˆ๋Œ€ +KB +๋‹น์—ฐํžˆ +์ด์Šค +์›์ „ +##ir +์„ค๋“ +์†์‹ค +๋‚˜๊ฐ” +์ ์ฐจ +์‹œ๋‚ด +##๋Œ€ํ‘œ +์‚ฌ๋ฌด์‹ค +ํˆฌ๋ช… +##ve +##og +๋›ฐ์–ด๋‚œ +ํƒ€์ž„ +์ฃผ๊ฑฐ +์ƒ์กด +๋งŒ๋‚ฌ +##๋ฐ”๋กœ +ํ˜น์‹œ +๋ณธ์งˆ +##ํ•™์ž +๊ทธ๋Ÿฌ๋ฏ€๋กœ +์ดˆ๋ฐ˜ +๊ธฐ์— +๋…ผ๋ฌธ +##๋ฒ„์Šค +ํฅ๋ฏธ +์„œ๊ตฌ +ํ›Œ๋ฅญ +๋ชปํ•ด +ํ…Œ์ด +๊ฐœ๋ณ„ +์„ฑ์ธ +์‹œํ‹ฐ +์—„์ฒญ๋‚œ +์ด๋ฅธ๋ฐ” +๊ฐœ์„ฑ +๋‹นํ–ˆ +##ce +๊ฑธ์–ด +์‚ฌ์œ  +##ess +##์–ด์กŒ +์ œ์ • +53 +##ํ”ผ์Šค +์•…ํ™” +๋†€์ด +์ธก์ • +๊ฒ€์ฆ +St +๊ณ„์ธต +ํ˜„๊ธˆ +๋ณด๋‚ธ +์ฆ์ƒ +์ทจ์ง€ +ํ˜ผ๋ž€ +์Šต๊ด€ +์—…์ข… +์‹ ์„ค +๋‚ ์”จ +์–ด๋ฅธ +##ํšŒ์‚ฌ +##์ผ€์ด์…˜ +##ers +๋ณต๊ท€ +์ฃผ๊ด€ +๊ณณ๊ณณ +์„ ํ˜ธ +๊ธฐ์†Œ +์ผ์ฐ +๋‹ด๋ฐฐ +์˜›๋‚  +๋‹คํ–‰ +##ri +ํŽธ์ง‘ +์น˜์—ด +๊ด€์ฐฐ +์‹ ํ™” +์ธ๊ณต +์ถ”์„ธ +๋ฌผ๋Ÿ‰ +์™œ๋ƒํ•˜๋ฉด +๋ณด์•ˆ +์‹ฌ์ง€์–ด +์ทจ์†Œ +๋Œ€์žฅ +๋น ์ง€ +๋ถ€์„œ +๋ฎค์ง€ +์ฒจ๋‹จ +์ƒ์—… +150 +์ด์žฌ +์˜ค๋น  +์œ ๊ธฐ +๋ฐ˜๋„ +54 +์ ‘์ˆ˜ +๋“์  +ํ™”๋ฉด +์•„์˜ˆ +์•Œ๋ ค์ง„ +๊ด‘์—ญ +ํ˜•์ œ +๊ทœ์น™ +##op +๋™๋ฐ˜ +์—ญ๋Œ€ +๋™๋ถ€ +๊ทน์žฅ +์ž๊ณ  +๊ณ ์ • +์ฒซ์งธ +์œ„์•ˆ +๋‚˜ํƒ€๋‚ด +๊ณ ์œ„ +์—”์ง„ +์ƒ๋ฐ˜๊ธฐ +ํ•™๊ณผ +์„ธํฌ +์•„์šฐ +๋‹ค์ด์–ดํŠธ +์‹ ๋ผ +์—ด์ • +๊ทธ๋‹ˆ๊นŒ +์—์ด +์•„์‰ฌ +๊ฑฐ๋‘ +1980 +ํ™œ๋ฐœ +์‹๋ฌผ +ํ’๋ถ€ +ํ™”์žฌ +๊ทผ์ฒ˜ +๋ฏธ์‚ฌ์ผ +์ง€๋‹ˆ +ํด๋ž˜ +๋ฐœ์‚ฌ +๊ธฐ๊ธฐ +๋Š๋‚„ +ํ•™๋ฌธ +๊ฐ๋™ +##ut +๋ถ€ํšŒ์žฅ +##๋“œ๋ฆฌ +์‹คํ–‰ +๊ฒฝ์„  +56 +์˜๋ฌธ +๊ณ ๋Œ€ +๋ถˆ๊ต +ํ•ฉ๊ฒฉ +์–‘์‹ +์ทจํ•˜ +๋น„์ฆˆ๋‹ˆ์Šค +๋ฐ”๊ฟ” +์ค„์–ด๋“ค +ํ•˜๋ฐ˜๊ธฐ +##ํ„ฐ๋ฆฌ +์ œ์žฌ +ํ™”์ œ +๊ฒฐ์Šน +ํฌ์Šค์ฝ” +##์œผ๋‹ˆ +์šฉ์–ด +##๋‹ค๋ฅธ +์šด๋ช… +ํ…๋ฐ +##๋‚˜๋‹ค +๋‚จ๋…€ +์„ค๋น„ +##๋น„์ „ +๋น„์ „ +์œ ์—” +์ŠคํŠธ๋ ˆ์Šค +๋‚ด๊ฒŒ +์ง€ํ•˜์ฒ  +์†Œ๋ฐฉ +์•ˆ๋…• +ํƒ€์ž +์ด๋ž‘ +๋ถ€์ง„ +์ฆ๋ช… +##๋Ÿฌ๋ฆฌ +์ƒ์žฅ +์ฃผ๋ ฅ +์ƒ๋ฌผ +##์›จ์–ด +๋‹น์ดˆ +์˜์กด +๊ฒฝ๋ ฅ +๋ชฐ๋ž +ํ™”์žฅํ’ˆ +์••๋ฐ• +์กฐ์‹ฌ +๋Š์ž„ +๊ฐœ๋ง‰ +๋Œ์•„์˜ค +##โ €โ € +์ €ํ•ญ +์ฆ๊ธธ +##๋‹ค๋ž€ +๋ŒํŒŒ +์ถ•์†Œ +๊ฒฝ์ƒ +์—ฌ์ธ +๊ฐ•์›๋„ +##ood +์‹ฌ์žฅ +์‚ด์•„๊ฐ€ +๊ณตํ†ต +๋กœ์จ +์ตœ๋Œ€ํ•œ +๋„ค๋„ค +์ž๋ผ +์ธ์ •๋ฐ› +์ƒ๋Œ€๋ฐฉ +์ˆœ์ˆ˜ +์‚ฌ๋ฒ• +์„œ๋ฏผ +59 +์ฐธ๊ณ  +๋‹ฌํ•˜ +๋‹จ๋… +ํ•ด๋ฐฉ +๊ด‘์žฅ +๋‚จ๋ถ€ +##ap +๋˜์ง€ +๊นจ๋— +๋…น์ƒ‰ +๋ชปํ•  +๋งค์ˆ˜ +ํ๋ฅด +์ „์ œ +์žฅ๊ตฐ +์ €์ง€ +๋‹จ์ˆœํžˆ +ํ”ํžˆ +##oc +##ํ‹ฐ๋ธŒ +๊ฑธ์Œ +๋‹ค์šด +์†Œํ™” +์‹ ํ˜ธ +1999 +ํ‚ค์›Œ +์ˆซ์ž +๋ช…ํ™• +ํŒ์‚ฌ +ํฌ๋ฆผ +##๋งˆ์ž +๋ผ๋””์˜ค +In +ํŒŒ๊ฒฌ +์˜ค์—ผ +์ฑ„ํƒ +๋Œ€๋ณ€์ธ +๋‹จ๊ธฐ +1998 
+์ปฌ๋Ÿฌ +์ „๋‚  +๋“œ๋Ÿฌ๋‚ด +##๋‹ค๋‹ˆ +85 +๊ฐ€๊ฒŒ +๋ฐœ๋งค +์„ ๋ฐฐ +๋งค๋งค +##ww +์ง€ํ–ฅ +๋ฒ•์ • +๋ถ„๋…ธ +๋งค๋‹ฌ +๋ชจ์ž +๋„์ฟ„ +์ด๋ฃจ์–ด์ง€ +๋ฐํžŒ +๊ฒ€์ƒ‰ +๊ตฌํ•˜ +www +์ž์‚ด +๋ผ์ง€ +๋๋‚ด +์ง€์œ„ +ํŽธ์•ˆ +์ ์‹ฌ +๋งž์ถค +์˜์› +ํš๋“ +64 +์„ธ์ข… +์ˆ˜์‹ญ +##๊ณต์—… +์ˆœํ™˜ +์ด์Šˆ +์žฌ๋ฌด +์™„๋ฃŒ +๋ฒˆ์—ญ +์›๋‚ด๋Œ€ํ‘œ +ํƒ์‹œ +์•„์šธ๋Ÿฌ +ํŒŒํŠธ +์„ฑํ–ฅ +์„ ๋ฐ• +์žฌ๋ฏธ์žˆ +์ง‘ํšŒ +1997 +์ž๊พธ +๋ฐ›์•„๋“ค์ด +๋ผ์šด๋“œ +์‹œ์› +์„œ๋ฅ˜ +์—ฌ๋‹น +์ž๋งˆ์ž +์—ฌ๊ฑด +ํ˜‘์˜ํšŒ +์‚ฌ์› +58 +์‹ ๋ถ„ +##์„ธ์š” +์ฐพ์•„๊ฐ€ +57 +๋Ÿฐ๋˜ +ํ”ผํ•˜ +๊ด€์ธก +##์ง์ž +์‚ด๋ฆฌ +์ทจ์žฌ +์ด๋ค„์ง€ +๊ต๊ณผ์„œ +๋ฐ”๋ž€๋‹ค +๋…ธ๋ฌด +๋ชฉ์ˆจ +์ธ์› +์Šค๋‹˜ +##์„ธ๋Œ€ +๊ฒฝ๊ณ  +๊ฒฌํ•ด +์ทจ๋“ +ํŒŒ์ด +๋‚˜์œ +์ดˆ์ฒญ +์‚ฌ๋ น +์†Œ์ค‘ +์œ„์ฃผ +ํ˜‘์ • +์ดํ–‰ +์กฐ์ž‘ +์—ฐ๋ฐฉ +์‚ด์ธ +ํ˜ธํก +์œก์•„ +ํผ์„ผํŠธ +์บ๋‚˜๋‹ค +๋†๋ฏผ +๋ธŒ๋ผ์งˆ +๊ฐœ๋ด‰ +๋ฌด์ƒ +๋ฌธ์žฅ +๋…ธ์ถœ +ํŽผ์น˜ +##์–ด๋„ +์ผ์–ด๋‚œ +๋Œ€๊ฐ€ +com +๋˜๊ฒŒ +์—ฐ๋ง +๋ณด๊ด€ +๋งˆ์ฃผ +์šฉ๊ธฐ +ํ˜ธ์ฃผ +๊ฐ€๋‚œ +์ฐจ๋‹จ +ํ•ด์šด +์ธํ”„๋ผ +๋Œ€ํ•™์ƒ +๋‘์‚ฐ +ํ•ด์ง„ +์ž„์› +์ฑ”ํ”ผ์–ธ +๋ณ€๋™ +##๊ตฌ๋‚˜ +์ •์‹ +##์œผ๋ฉด +ํ”„๋ ˆ +์งˆ๋ณ‘ +๋งค์ž… +##๊ต์œก +์„ ์ถœ +๊ฑธ๋ฆฌ +์–ด์จŒ +##ie +์ž์•„ +๋งฅ์ฃผ +ํ•ฉ๋ณ‘ +์—ฌ์ˆ˜ +ํ…”๋ ˆ๋น„์ „ +์˜ํ•™ +์ ์œ  +##์›จ์ด +์กฐ์–ธ +๋ฌธ์ œ์  +์ž ์žฌ +ํ˜ธ์†Œ +๋น„๋ฆฌ +๋ณธ์‚ฌ +์‹ ์„ธ +##im +๋…์„œ +๊ธฐ์•„ +์ง€๋Œ€ +์ƒ์œ„ +๋ฌผ๊ฐ€ +๋งŒํ™” +๋ณด์กด +์†”์ง +๋Œ€๋ฆฝ +์ œ์•ฝ +PC +๋…ํŠน +์‹ค์—… +์ „์„ธ +์Šคํ† ๋ฆฌ +๋“œ๋Ÿฌ๋‚˜ +์ถฉ๋Œ +์˜ค๋žซ +##์นœ๋‹ค +##์ง์› +์ ํ•ฉ +์‚ฌ๋‹จ +##๋ž€ํžˆ +์—ฐ์˜ˆ +์‚ฌ๊ธฐ +์ผ๋Œ€ +๋ฐฑ์„ฑ +##์€์ด +์•„ํ”„๋ฆฌ์นด +๋ฐฉํ•ด +์‹œ์‚ฌ +๊ฐ์‹œ +๋–จ์–ด์กŒ +๊ฐ€๋” +์„œ์–‘ +์ด์‚ฌ์žฅ +๊บผ๋‚ด +ํฌ์žฅ +ํ™์ฝฉ +์Œ๋ฐ˜ +์ดํšŒ +๋‹นํ•œ +##ity +์ •์น˜์ธ +##ํ”Œ๋ ˆ์ด +Ch +์œ ๋‹ˆ +์ „๋ถ +63 +ํšŒ๊ณ„ +##IA +๊ธฐ์ง€ +๋ฌด๋ฆŽ +CEO +๋…€์„ +ํŠนํ—ˆ +๋„์‹ฌ +์•„์ดํ…œ +์‚ฌ๋ฌผ +์ •์ฐฉ +์ปค๋‹ค๋ž€ +๋ฏธ์•ˆ +๋ฉด์ ‘ +2020 +๊ตํ™˜ +์—ฐ์ˆ˜ +์ˆ˜์˜ +๊ฐ€๋™ +์ฒœ์•ˆ +์‚ฌํ‡ด +์†Œ๋ฌธ +์ €๋ ด +์ง„์ˆ  +๋‚˜๋ฆ„ +66 +์กฐ๋ช… +##and +๋ชฐ๋ผ +์€ํ‡ด +##NS +์˜ค๋žซ๋™์•ˆ +์ฒญ์žฅ +์ •ํ•˜ +์ •์ง€ +ํ‰๋“ฑ +์Šค์œ„ +์‚ฌ๋ผ์ง€ +์—์„œ๋ถ€ํ„ฐ +์นจ์ฒด +๋ฐด๋“œ +๊ทธ๋ฆฌ์Šค๋„ +##์นด๋“œ +๊ฒฝ๊ธฐ์žฅ +์†Œ์ˆ˜ +์ค˜์•ผ +๋Š๊ผˆ +##์›Œ์„œ +์Šคํ”ผ +ํ† ๋Œ€ +์ถฉ์ฒญ +์ง€์ฃผ +์ง€๋ช… +๋ฌธ์„œ +์‹ฌํŒ +๋Š”๋‹ค๊ณ  +์‹ฌ์˜ +์ดํ‹€ +์ง์ „ +ํŽ˜์ด์Šค +##์„œํŠธ +๋ถ€๊ณผ +##์กฐํ•ฉ +๋ธŒ๋ฆฌ +๊ณ ๊ตฌ +๋ฌด๋ ต +๊ณผ๋ชฉ +๋Š˜๋ฆฌ +##์œผ๋กœ +๋“ค์–ด์„œ +์ƒํ•˜ +๋งˆ๊ฐ +FC +์ˆ˜์‹œ +๋ฌด๋ ค +##์ด๋ฆฌ +๊ฐ์ž +๋ถ€์œ„ +์˜ˆ๊ณ  +๋งˆ์นจ๋‚ด +๊ณ„๊ธ‰ +์†Œ๋ จ +์ƒ์ธ +##ํ•ด์„œ +์ด๋… +๋ฐฐ์ œ +99 +์ž๋„ค +๋‹ต๋ณ€ +##์ด๋„ˆ +84 +๊ฒฝ๋น„ +์ดˆ์  +๋งŒ๋‚จ +์ปค์ง€ +์œ ๋Œ€ +์™ผ์ชฝ +์„ฑ๋‚จ +์ „์› +์ธ์šฉ +๋ฉฐ์น  +์˜€์œผ๋‚˜ +๋ฐ”์ด์˜ค +์—์–ด +ํ”ผ์•„ +๊ณต์ฃผ +๋ฌ˜์‚ฌ +ํŒŒํฌ +๋กœ๋ด‡ +์ฆ๊ฑฐ +๊ฒฐ์ œ +์„ฑ๋Šฅ +ํ• ์•„๋ฒ„์ง€ +์ฃผ์ฃผ +๊น€์น˜ +๋งˆ๋ฅด +์ผ์œผํ‚ค +๋ชฐ๋ ค +์‹ ์•™ +##๋‹ดํšŒ +์ง€๋‚˜์น˜ +๊ณ„์—ด์‚ฌ +์ด๋ผ +72 +๋†์ดŒ +๋ฌดํ•œ +์˜ˆ์•ฝ +ํŠธ๋ฆฌ +๋™์–‘ +๋Œ€๊ฒฐ +๊ฐ๊ด€ +์—ฐ๋งน +๋Œ€๋ถ +##๋งˆ๋”” +์žฅ์‚ฌ +ํ˜ธ๋‚จ +ํ•˜์—ฌ +๊ณ ์†Œ +์ž‡๋”ฐ +๊ณ ์ „ +์ž ๊น +##๋‹ˆ์–ด +์˜ค๋ฐ” +์‹œ์ฒญ์ž +##๋œจ๋ฆฌ +ํ•™๊ธฐ +๋ฐ•์ • +๊ฑฐ๋ก  +์ ์ž +๋ช…์นญ +ํ’๊ฒฝ +##์Šคํ‹ฐ +61 +์•„๋ฌด๊ฒƒ +๋ถ€์‹ค +์ถœ์ž… +์—ฐ๊ด€ +600 +ํ”„๋ฆฌ๋ฏธ์—„ +๊ฐ„๋‹ค +##์ผ๋ฆฌ +๋ณด์™„ +##๋กœ์šด +๊ฐ•์‚ฌ +๋ชฉ์‚ฌ +์ฐฝ์˜ +##์กฐ๊ฑด +๋”๋ผ๊ตฌ์š” +๋ฌด์„œ +๋‚ด๋‚ด +๋ชจ์œผ +95 +์ง€ํ‘œ +๊ณ ๋ฌธ +์กฐํ™” +๊ฐ€์‹œ +์ง„ํ™” +ํƒ€๊ฒฉ +ํƒœ์–ด๋‚ฌ +๋ฌด๊ฒŒ +##์—ฐํžˆ +์‹ ํ•œ +์•„ํŠธ +์›ํ•˜ +์—ฌ๊ฒจ +ํ…Œ๋งˆ +์•„๋‹ˆ๋ฉด +ํ™˜์œจ +##๊นŒ์š” +์—๋‹ค +##ra +๋Œ€๊ตฌ์‹œ +FTA +๋„๋ง +ํ•œ์ • +๋Š˜์–ด๋‚œ +๊ฒจ์šฐ +##๋กœ๊ทธ +์ ‘์ด‰ +๋งŒ์ผ +๋Œ€ํ•™์› +##FA +๋ฐ”๋ฅธ +๊ต์œก๊ฐ +์ค‘๋Œ€ +๋ณด์ผ +##๋ ˆ์ด๋“œ +1995 +๋‚จ์ชฝ +๊ณตํ•™ +ํ•˜์šฐ์Šค +๋ณด๊ธ‰ +์˜ํ–ฅ๋ ฅ +์‹๋ฏผ +189 +##๋จธ๋ฆฌ +๋™๋งน +์‹ค๋‚ด +๋ฐ˜๋„์ฒด +ํ…Œํฌ +๋Œ€์ฒ˜ +์˜ค๋ฅธ์ชฝ 
+##th +์‹ ํ•™ +๊ฑด์กฐ +์ฝ˜์„œํŠธ +์œ ํ•™ +์ผ์–ด๋‚ฌ +๊ฐ„๋‹ดํšŒ +ํ…Œ๋Ÿฌ +ํฌ๋Ÿผ +๊น€์„ฑ +๋งž์ด +##์—๋„ +์„ ์ž„ +์ผ์น˜ +๋Œ€๋งŒ +##์ฒ ์ˆ˜ +##๋„๋กœ +๋„๋ฆฌ +์กฐ๋ก€ +๊ฐœ์„ค +ํ•ด๊ตฐ +62 +์—ด์ฐจ +๋๋‚œ +๋””์Šค +์–ด์ฉ” +๋ณ€ํ•˜ +์œ ๋กœ +๋…ธํŠธ +์• ๋‹ˆ +์ˆ˜์ˆ˜๋ฃŒ +๊ธด๊ธ‰ +์œ„๋Œ€ +์•„๋ผ +๋ถ€์‚ฌ +๋ฐ๋ฆฌ +๋Œ๋ฆฌ +๋„ค์ด +๊ตฌ์„ฑ์› +๋ถ€๋‹น +์ œ์ฃผ๋„ +์„ธ์›  +๋ผ๋ฆฌ +์š•๋ง +๋”๋ผ๊ณ ์š” +์ •์„œ +๋งˆํŠธ +์ฆ์‹œ +๊ถŒ์œ„ +68 +๋ถˆ๋ € +์ง€์ถœ +์ˆ˜์น˜ +์—ฐ๋ น +์ด์ • +๋ƒ๋Š” +์ˆ˜์ง‘ +์žฅ์‹ +๊ฐ์—ผ +์ฒœ์—ฐ +์ ˆ๊ฐ +์ขŒ์šฐ +์˜ํ˜ผ +๊นจ๋‹ฌ +๋ฎค์ง€์ปฌ +์ฐฝ์ž‘ +๊ฐœ์ •์•ˆ +๋Œ€ํญ +##์ฃผ์‹œ +๋ฐฐ๋ ค +ํ™•์‹  +๋ชปํ•œ๋‹ค +๋ฌธ๋ช… +๊ฐ„๋ถ€ +ํ๊ธฐ +๋“ค์–ด๊ฐ„ +์นจํ•ด +๋…ธ๋ฌดํ˜„ +์šฉ์ธ +IC +๋ถ๋ถ€ +์‚ฌ์•ˆ +##๋ ค์•ผ +๋ณ‘์‚ฌ +์ด์Šฌ +๋ฏธ์„ธ +๊ธฐ๋ฆ„ +์˜ค๋ฐ”๋งˆ +์ˆ˜๋ฆฌ +์‘์› +๊ตฐ์ธ +๋งˆ๋‹น +์ถ”์–ต +ํ‰์ฐฝ +์ฃฝ์ด +์œตํ•ฉ +๋ฐฐ๋‹น +์žฅ๋‚œ +๊ณ ๋„ +์‹ค๋ฌด +์ ์–ด๋„ +๋‹ด๋ณด +์™ธํ™˜ +์ž„๊ธฐ +์ด‰์ง„ +##์Šคํ†  +๊ทผ์œก +์œ ์ถœ +ํœด๊ฐ€ +์ฝ”์น˜ +ํ†ต์น˜ +๋œ๋‹ค๋ฉด +##์ง€์‚ฌ +๋…ธ์‚ฌ +์ž…๋ฒ• +๋†ํ˜‘ +1970 +์ƒ์ž„ +์†๊ฐ€๋ฝ +์ง„๋ฃŒ +๋ฌด์กฐ๊ฑด +##๋งˆ์Šค +๋ณดํŽธ +๋“ค์ด +๋งŒ๋‚  +๋Š๋ผ +1996 +ํ˜ธ๊ธฐ +๋ชฐ์•„ +ํŒจ๋ฐฐ +67 +๊ณตํ™”๊ตญ +์œ ์•„ +ํ˜‘์กฐ +๋…ผ์Ÿ +๋ฐ”๋žŒ์ง +๋œจ๊ฑฐ์šด +๊ฑด๋ฐ +๋ฏธ๋งŒ +##๋Ÿฌ์ง€ +์บ ํŽ˜์ธ +๋ฒค์ฒ˜ +๋Š์ž„์—†์ด +์Šค์ผ€ +์ด์ค‘ +##ab +ํŽธ๋ฆฌ +๊ตฌ์ƒ +์ž‘๊ณก +ํ™”์„ฑ +์ •ํ™•ํžˆ +์ œ์ž +##๋ฌธํ™” +๊ณ ๋ น +์ฒดํฌ +##๋ ˆ๊ธฐ +์˜จ๋„ +์™€์ธ +์‹ธ์šฐ +๋ˆˆ์น˜ +์ €์žฅ +๊ฑธ๋ ค +๋ฌด๋„ˆ +##๊ฐ€์Šค +๊ณ ์œ  +4000 +ํ˜„์•ˆ +์ฃผ๊ฐ„ +ํ†ต์ฆ +์ฃผ์ตœ +์ง‘๊ณ„ +๋„๊ตฌ +์ ์‘ +ํก์ˆ˜ +๋ธ”๋ฃจ +์„ ๋‘ +๊ณง๋ฐ”๋กœ +๊ตฐ์ˆ˜ +๋‹ต๋‹ˆ๋‹ค +ํ”Œ๋žซ +์–ด์จŒ๋“  +๋งˆ์ดํฌ +์‹ญ์‹œ์˜ค +์ด์Šค๋ผ +์ˆ˜๋ก +์ง€ํ˜œ +์†ํ•˜ +๋นผ์•— +๋Š๋‚€ +์„ ์‚ฌ +์ง๋ฌด +๋‹จ์† +๋Š”๋‹ค๋ฉด +์„ฑ๋ช… +๊ต์‹ค +๋“œ๋Ÿฌ๋‚ฌ +๊ธˆ๋ฉ”๋‹ฌ +์—˜๋ฆฌ +๋†๊ฐ€ +๊ทธ๋ž˜์š” +๊น€์ข… +๊ตญ๋‚ด์™ธ +๋‹น์‚ฌ์ž +์ต์ˆ™ +์†Œ์œ„ +ํ‚ค๋กœ +๋Œ€๋ฒ•์› +์ง€๋ฆฌ +๋ฏธ๋‹ˆ +๋ƒ๊ณ  +๊ทธ๋Ÿผ์—๋„ +ํ‘ธ๋“œ +๊ทธ์น˜ +์–‘๊ตญ +๋ฆฌ๋”์‹ญ +์ง€์ง„ +2019 +๋‹จ์žฅ +๊ธฐํ›„ +๋ถ์ชฝ +๋ฒŒ์–ด์ง€ +ํ•œ๋งˆ๋”” +๋ถ„์Ÿ +์•„๋‹™๋‹ˆ๋‹ค +์‰ฌ์šด +ใ… ใ…  +##๋ฐ”๋‹ฅ +์„œ๋ถ€ +๊ตฌ์ œ +์ด์Šค๋ผ์—˜ +##ED +์“ฐ๋ ˆ๊ธฐ +๋ผ์„œ +์—†์•  +์•ˆ์ฒ ์ˆ˜ +์˜ํ•ด์„œ +์—ฐ์„ค +๊ตฌ์ฒญ +์œ ๋ฐœ +ํ•ญ๋ชฉ +##๊ถŒ์ž +๋˜๋Œ +ํƒœ์–ด๋‚˜ +์œ ํ–‰ +์‚ฌ๋กœ +ํ”Œ๋Ÿฌ์Šค +๋ถ€ํ™œ +##์นด์˜ค +์‹ ์„  +์•„์›ƒ +๋น„์„œ +์—ด๋ ธ +๋ฌธํ™”์žฌ +์ฐฌ์„ฑ +์ˆ˜์ฃผ +์Šค์นด +๊ฐœํ†ต +ํ•œ๋•Œ +์ฆ‰์‹œ +##๋Ÿฌ์ง„ +๋‚จ๊ธฐ +1994 +๋ฒ•๋ฌด +์ฃผ์ž +๊ณ ์ƒ +์ค‘๊ณต์—… +๊ณ ๋ฐœ +๊นœ์ง +์ดˆ๊ณผ +์ ๋‹น +๊ฐ€๊ณ„ +์ด์ฃผ +์ง€์ผœ๋ณด +์ž๋ณธ์ฃผ์˜ +์˜ฌ๋ฆฐ +์‹ ๊ธฐ +##๋‚จ๋„ +74 +๊ฒฐ์˜ +์˜ค์ง +์„ ์ง„๊ตญ +kr +๋‚˜๋ˆ” +ํ‰์–‘ +์ƒ๊ฐ€ +์ทจ๊ธ‰ +๋น„๋””์˜ค +๊ฐ์„ฑ +์ŠคํŒŒ +๋ณต์› +800 +์†๋‹˜ +์ˆ˜๋ฐฑ +๊ตฌํ˜„ +์ฒญ์•ฝ +ํŒŒ์—… +์ง‘๊ถŒ +๋Œ€๊ฐ• +์ข…๋ฃŒ +๋„๋Œ€ +CJ +ํ™ฉ๊ธˆ +71 +73 +๋‚ด๋ฆฐ +์ „ํ›„ +##๊ฑด์„ค +##ia +ํ›ผ์† +์ƒ์‹ค +SNS +์ทจ์•ฝ +๋ผ๋ฉด์„œ +๊ตญ์ •์› +๊ธฐ์จ +์ผ์‹œ +##๋‹จ์ง€ +์ด์–ด์กŒ +ํ•ด๋ช… +๊ฑฐ์น˜ +๋ถˆ๊ฐ€ํ”ผ +๊ฐ•์  +๊ตฌ๋ฏธ +์ปจ์„ค +์„ ๋ณด์ด +120 +๋ช‡๋ช‡ +์‚ด์ง +##ea +์ผ์ž +์ดํ˜ผ +์ธ๋ฌธ +77 +๋„๋Œ€์ฒด +๋’ค์ง‘ +์ž…์ง€ +๊ทธ๋ ‡์ง€๋งŒ +๋™๋ ฅ +๋ณ€์ˆ˜ +to +๋‹ค๋ฆ„ +์š•๊ตฌ +๊ฒฝ์ฐฐ์„œ +๊ณ ๋ง™ +์žฌ๋Šฅ +##ew +##์˜์ƒ +ํ™•์‹คํžˆ +์žฌ๊ฑด +๊ตํ™ฉ +์‹ ์† +์–ด๋ ค์›Œ +์Œ์„ฑ +๋ฏผ์› +๊ฒฐ์‹ฌ +์ž๋ฌธ +์‹ ๋ถ€ +์ฃผํ–‰ +๊ฐ€๋ณ +์†Œํ”„ํŠธ์›จ์–ด +๋šœ๋ ท +๋ฉ”์ด์ € +๊ฐ€๊ณต +์กฐ๋‹ฌ +๊ฐ€์ƒ +์—ด๋ฆฐ๋‹ค +์ž…์ฆ +##๋‚˜์ด +์ตœ์†Œํ•œ +์Šคํฌ +๋ฐ€๋ ค +๋ฐ•๋žŒ +์ Š์€์ด +์Šคํ‚ค +๋ช…ํ’ˆ +##ํžŒ๋‹ค +๋น„์ • +๋“ ๋‹ค +์‚ฌ๋กœ์žก +##์Šคํƒ„ +ํ›„๊ธฐ +๊ธ‰์ฆ +๊ณตํ—Œ +์•Œ์•„๋ณด +SBS +ํŠธ๋ Œ +๊ณผ์ผ +์ถœ์‚ฐ +๊ฐ•์—ฐ +์ฐธ์กฐ +์„ ๋„ +๋™๋‚จ +์„ฌ์œ  +์‹œ๋ฒ” +์–ด๋‘ +์ค„์–ด +๊ณ„์‹œ +์—ฌ์„œ +##๋‚ธ๋‹ค +ํ˜„ํ–‰ +์ถ”์„ +ํŒŒ์›Œ +##์šฐ๋“œ +์„œ๋ช… +๋•Œ๋กœ +๋ฏผ์ค‘ +์žฅ๋ฅด +์žฌ์ƒ +๊ณผ๋„ +์—ฌ์˜ +์ข…์ข… +๋ฉˆ์ถ” +์†Œ์Šค +ํ•œ์ฐธ +์ƒ๋ฌด +์˜์›… +์•„๋ฉ” +##๋‹ค๋ฆฌ 
+๊ธ‰๊ฒฉ +ํŠธ์œ„ +๊ทธ๋Œ€ +์†Œํ˜• +๋ฐ”์œ„ +๊น€ํ•ด +๋“ค์–ด์™” +๊ณ ์–‘์ด +์•„์‹œ +##๊ฑฐ๋ ธ +๋‚˜ํƒ€๋‚œ +์—๊ฒ +##๋ž˜๋„ +ํ•œ์ง€ +ํ์‡„ +์ผ๋ฐฉ +์•„๋งˆ๋„ +์‹ฑ๊ธ€ +์ž…์‹œ +์›์ • +ํ•œ๋ฒˆ +GS +76 +##์ƒํ™œ +์ „์ฃผ +์žฅํ•™ +๊ธฐ๊ธˆ +์กฐ์ง€ +๊ทน๋‹จ +ํ•™ํšŒ +์‹œ๊ณ„ +์ด๋ฏผ +์‚ฌํšŒ์ฃผ์˜ +์ง€๋ƒˆ +ํ…Œ์ด๋ธ” +๋†๊ตฌ +๋‚จ๊ฒจ +ํœด์‹ +๋น ์ง„ +๋ชจ์ƒ‰ +##์˜จ๋‹ค +์•ผ๊ถŒ +ํ•ฉ๋ฅ˜ +๊น€๋Œ€ +##์‹œ์ฝ” +์นญ์ฐฌ +๊ตญ๋ฐฉ๋ถ€ +๊ฑฐ์ง“๋ง +๋กœ์Šค +##ใ…‹ใ…‹ใ…‹ใ…‹ +์—ฌ์„ฏ +์ตœ์šฐ +์˜์ž +์ฒญ๋ฌธ +์•„๋‹๊นŒ +๋ถ•๊ดด +##์‹œ๋‹ค +##์ •๋ถ€ +๋น„ํ–‰๊ธฐ +69 +๊ฑฐ์ ˆ +๋ฒ•์น™ +์•„์ € +์ž…๊ตฌ +์šฉ์—ญ +์กด์ค‘ +์Œ๋ฃŒ +๋ฆฌ๋ผ +์ฒœ์ฒœ +์ฝ”๋“œ +๊ฐ€๋ฐฉ +์ง„์˜ +๋”๊ตฐ +์ฃผ๊ธฐ +##iv +์–ด์šธ๋ฆฌ +88 +์ค‘๋ฐ˜ +์นจ๋Œ€ +์ •์„ฑ +์ง„๋ฆฌ +๊น€ํƒœ +๋‹ด๊ธด +์—ฐ๋ด‰ +๋งˆ์‚ฌ +๋Œ€๋ฅ™ +##ํ•ญ๊ณต +์ฒœ์ฒœํžˆ +์ด์šฉ์ž +์ข‹์•„ํ•˜ +##ion +์„ธ์šด +์ž„์ง์› +์ˆ˜์ง€ +๋ณต์ˆ˜ +๋ฏฟ์Œ +๋ถ™์—ฌ +์ง„์ฃผ +##๋ธŒ๋ฆฌ +ํ’ˆ๋ชฉ +์•„์ €์”จ +์…‹์งธ +ํ”์  +๊ฐ€์งˆ +ํ•„๋ฆฌ +๋™์˜์ƒ +ํ•œ๊ฐ€ +์˜์ž… +๊ฐ€๋งŒ +์˜ค์ŠคํŠธ +๊ฒฝ๋งค +๊ธฐ์ƒ +์ง€์—ฐ +##์Šคํ„ด +๊ณ ๋งˆ +ํ†ต๋ณด +๋ฏธ์นœ +๋ฐ•์ง€ +์•„์นด +6000 +๋Œ์•„์™” +์ด๋ฆฌ +์ž…์ฐฐ +๊ทธ์ณค +ํฌ๋ฅด +์—ฐ๋ฃŒ +##๊ทธ๋ฃน +์„ ์ „ +๋ฐฐ์ƒ +๊ฐ๊ตญ +๊ตณ์ด +์‹ ์ค‘ +cm +์ธ์—ฐ +๊ธฐ์šด +๊ตฌ๋„ +ํ”Œ๋ผ +์œ ์‚ฐ +##ard +์‚ฐํ•˜ +ํ•œ์ชฝ +##์–ด๋ฒ„ +ํƒ€์ธ +๋ฌผ๋ฅ˜ +๋ฐ๋ ค +๋งˆ๋ฒ• +์–ด์ฉŒ๋ฉด +์ „ํ•ด์กŒ +๋…ธํ›„ +์•„๋ฌด๋ž˜๋„ +๊ตฌ๊ธ€ +##๋ฉ”์ด์…˜ +๋Œ€๊ฐœ +์ง„์ง€ +์ถฉ์กฑ +์–‘์‚ฐ +๋ถ€๋”ช +ํƒ€๋‹น +์ถ•ํ•˜ +๋ฏธ์ˆ ๊ด€ +๋นŒ๋ ค +์žฅ์ˆ˜ +์•„๊ฐ€ +๋– ์˜ค๋ฅด +์ €๊ธฐ +๋„ˆํฌ +8000 +๊ตญ์–ด +##์›Œํ•˜ +์†Œ์™ธ +๋ณ€์‹  +์‹ฌํ•˜ +์‹œ๋‹ฌ +๋ณธ๋ž˜ +์กฐ์•ฝ +์˜ˆ๊ธˆ +##๋‚˜๋งˆ +์ˆœ์„œ +์ฐจ๊ด€ +์ž„๋ฌด +์–‘์ƒ +๋‹น๋ถ€ +์˜ฌ๋ฐ” +๋™์ž‘ +์„ฑ๋ฆฝ +์ฒญ์‚ฌ +์ž๊ตญ +78 +์š”๊ฑด +๊ธธ์ด +๊ณ„์ขŒ +1992 +๋ฌด์‚ฌ +๊ฐˆ์ˆ˜๋ก +์•ˆํŒŽ +๊ตฌ์› +๊ฐ€๋ฅด์น˜ +ํ…”๋ ˆ์ฝค +๊ตฌ๋ณ„ +ํ˜„ํ™ฉ +ํŠธ์œ„ํ„ฐ +์ง€๋„๋ถ€ +##ํŠธ๋ฆฌ +์ง•์—ญ +๊ธˆ์† +๋ฒ ์ŠคํŠธ +์ง„ํ•™ +์ผ€์ด๋ธ” +ํ™”์žฅ์‹ค +##๋œ๋ž€๋“œ +์“ฐ์ด +์‹œํ‚จ๋‹ค +๋“ค์–ด์™€ +๋ธ”๋ก +์ปค๋ฎค๋‹ˆ์ผ€์ด์…˜ +๋‘˜๋Ÿฌ์‹ผ +์Šน์ง„ +๊ณต์ธ +##์ฒœ์‹œ +๋ฉ”๋ฅด +๋™ํ•ด +๋น„์ •๊ทœ +์ƒˆ๋ผ +ํ™”๋ฌผ +์ผ๊ธฐ +๋„ค๋œ๋ž€๋“œ +์กฐํ•ญ +๊ธ‰์—ฌ +ํ•ด์•ˆ +์ ํฌ +์••๋ ฅ +๋ถ€์ž‘ +์—‘์Šค +ํŽธํ•˜ +700 +ํ‡ด์ง +๋ฐ›์•„๋“ค์—ฌ +์žฅ์•… +##ame +๋น„์ •๊ทœ์ง +๋Œ€ํ–‰ +์ฑ„๋ฌด +์›์ฃผ +๊ธฐ๋ฒ• +ํƒœ์–ด๋‚œ +๋ฐ•๋žŒํšŒ +๊น€์ •์€ +๋งž์„œ +ํƒ„ํ•ต +์šฐ์šธ +##๋””์…˜ +๊ณจ๋ชฉ +๊ณ ๋ฆฌ +๋งˆํฌ +์ƒ‰๊น” +๊ด€๋ฃŒ +๋ชจ๋‹ˆ +##ist +์ณ๋‹ค๋ณด +ํƒœํ‰ +๋ผ์š” +๋†’์—ฌ +๊ณต์ž‘ +๊ฐ„์ ‘ +๋ฌด์ฒ™ +๋ฌด์žฅ +์˜ˆ์„  +๋งˆ๋”” +์›Œ๋‚™ +์žก์ง€ +์ถœ๊ทผ +์น˜๋ฅด +1960 +์ค‘๋™ +์ง€๋‚˜๊ฐ€ +์˜ํ™”์ œ +์ด๋‹ +์—„๊ฒฉ +##๋ผ๋ฆฌ +๋น„๋กœ +ํ‰๋ฒ” +๋„๋‹ฌ +1993 +##ํผ์Šค +ํŒŒ์ผ +ํ™˜์˜ +๋ฌด์šฉ +์œก๊ตฐ +ํ•„์ž +์ „๋‹น +ํ–ฅํ•œ +์ฃผ์ฐจ์žฅ +๊ฒฝ์˜์ž +๋ฐœ์ „์†Œ +์„๊นŒ์š” +๋Œ€๋ชฉ +์™•๊ตญ +์•ˆ์‹ฌ +์ฃผ๋ถ€ +์žฌ๋ฐฐ +์ด๊ฑธ +์˜คํ”„ +##๋ถ๋„ +์˜์žฅ +๊ณต๋žต +๋„๋‚ด +##์ด๋ธŒ +98 +##๋ฆฌ์ŠคํŠธ +๋‹ด๊ฒจ +์ผ๊ด€ +์œ ๋™ +์ฑ™๊ฒจ +์œ ๋ ฅ +์ด์Šฌ๋žŒ +๋ช…๋‹จ +๋ ˆ๋“œ +์ผ์› +๋ณด์—ฌ์ค€๋‹ค +๊ตฌ๊ฒฝ +์ €๋Ÿฐ +์•ž์žฅ +##TV +ํ‘ธ๋ฅธ +๊ฐ€๋ฒผ +##ell +์ฒ ๊ฐ• +##์ฆ๊ถŒ +ํŒŒํ‹ฐ +##ep +##์–ด์ง„๋‹ค +๋ฉ•์‹œ์ฝ” +๋„์ค‘ +์‹ค์ • +๋ฐฉ์‚ฌ +๋„˜์–ด๊ฐ€ +๋ฒ ์ด์ง• +์ฐธ๊ฐ€์ž +์ค‘๋„ +02 +ํŒจํ„ด +์ฃผ์„ +์„ธ๋ถ€ +์˜๋‚จ +๋น„ํƒ€ +##50 +๊นจ๋‹ซ +๋– ๋‚ฌ +##์‚ฐ์—… +๋งค์ถœ์•ก +๊ฐ•์š” +##ํšŒ์˜ +๊ฐ€์‚ฌ +๋“œ๋ฆด +๋งˆ์นœ +์ง„ํฅ +ํ›„๋ฐฐ +์œ ์น˜์› +๊ฒฉ์ฐจ +๊ด€๋… +๊ท€์กฑ +๊ทธ๋ฆ‡ +์ฒด์œก๊ด€ +ํ•™์ˆ  +์ข…์‹œ +ํœด๋Œ€ํฐ +๋™์ชฝ +##์‹œ์žฅ +##๋””์•„ +์žฌ๊ฐœ +๋ฒŒ์˜€ +์ „์‹œํšŒ +๊ต์œก๋ถ€ +์ฒดํฌ +์ŠคํŠœ +๋กœ๋“œ +์šฉํ’ˆ +์ž…์ˆ  +์ถฉ์‹ค +์„ญ์ทจ +AI +188 +๊ณ ๊ตฌ๋ ค +##EC +๋ ์ง€ +ํ™˜์ƒ +๊ฐค๋Ÿญ +ํด๋ž˜์‹ +๋ถ™์žก +##ver +ํ•ด๊ณ  +๋ณดํ—˜๋ฃŒ +๊ฐค๋Ÿญ์‹œ +์ดˆ๋ž˜ +๋ฏผ๊ฐ +์˜คํ•ด +ํƒˆ์ถœ +๋™ํ™” +์žฌ๋ฒŒ +๊ณต์ œ +##์Šคํ‚ค +##ff +##ill +๋ฒ ๋ฅด +##๋ฉ์ด +์ƒ์ƒ +์ด๋ฃฌ +์‹ฌํ•œ +์œผ๋ ค๊ณ  +##30 +ํ•˜์—ฌ๊ธˆ +๊ณผ์„ธ +์Ÿ์•„ +์—ฌ์ง€ 
+๊ฐ€์š” +ํŒŒํŠธ๋„ˆ +์น˜์ฆˆ +์ด๊ด„ +##์šด๋™ +๊ณ„๋‹จ +์‹ ์ธ +์‹ญ๋‹ˆ๊นŒ +์ค‘๊ตฌ +๋ฐ•์Šค +์ „์„ค +๊ฐœ์ฒ™ +ํ‘œ๋ฉด +##ํ•˜์—ฌ +##PG +์†์ƒ +ํ…Œ์ŠคํŠธ +82 +๊ท€๊ตญ +๋น„ํ‰ +KIA +์†Œ์š” +์ฒด๋ ฅ +ํƒ„์†Œ +ํ–ฅํ•˜ +ํ•™์œ„ +์œ ์ „์ž +์นจ๋ฌต +๋Œ€๋Œ€ +์ค„์—ฌ +๋„˜์น˜ +๋ถ€๊ฐ +๋งˆ๋•… +์• ๋‹ˆ๋ฉ”์ด์…˜ +180 +์ •์ˆ˜ +์„ ๊ต +์Šค์›จ +์…”์„œ +๊ด€์„ธ +##๋ ˆ์ธ +๋ฉ”๋ชจ +๋‚˜์„ค +์ถฉ๋ถ +๋ฐ”๊นฅ +๋‚ ์•„ +81 +##IS +์šฐ์œ  +์„œ์ชฝ +์‚ดํ•ด +##tic +7000 +๋ทฐํ‹ฐ +ํ‹€๋ฆผ +๋‘๋“œ +์‚ฌ๋“œ +์œ ๊ถŒ์ž +์–ด๋ฆฌ +์‹ ์ž… +ํ™”์ดํŠธ +์ด๋ฅธ๋‹ค +ํ™€๋กœ +์‚ด๋ ค +๊ธฐ์›์ „ +##์ด์žฅ +ํ•ด์ƒ +์ด์   +๊ณ„์ ˆ +1991 +๊ฐœํ—Œ +์ •์ž‘ +๋ช…์„ฑ +๋‹ค์Šค +๋Œ€๊ฑฐ +##CC +์ž‘๋™ +##์‹ฑํ„ด +st +๋ถˆํ–‰ +์ƒ์‚ฌ +PD +๋žœ๋“œ +์นด์นด์˜ค +์ ๋ฐœ +86 +์˜๊ด‘ +์—”ํ„ฐ +์ „์„  +##๊ฒŒ์š” +๊ณต์ค‘ +ํ™•์ถฉ +##ํ”ผ์•„ +ํ•ฉ๋™ +##๋“ค์ด +์œ„์ƒ +๋จน์Šค +์ „ํŒŒ +ํ•œ์ธต +์ ๋ น +์˜จ๊ฐ– +์‹ค๋ง +##์ปจ๋Œ€ +์˜๋ขฐ +์กฐํ•ฉ์› +๋ฐ˜์„ฑ +์˜ํ†  +##๊ฑฐ๋ž˜ +์—ญ์ „ +์ž„์‹  +ํ™•๋ฅ  +ํฌ์ŠคํŠธ +ํŽ˜์ด์Šค๋ถ +๋“œ๋””์–ด +์–ด๋‘  +์ง„์‹ฌ +์ „์ง€ +์œ ์ž… +๋ชฉ๋ก +์œผ์‹œ +๊ต์žฅ +๊ฐ€๊น +๋ชจ์•˜ +์‹œ์ • +๋ถ€๋„ +๋ถ„์—ด +์ง•๊ณ„ +ํ•˜์œ„ +์ง„๋กœ +์ €์ž‘ +๋ฐฐ๋‹ฌ +##est +๋งค์ฃผ +๋ˆˆ์•ž +##๋กœ๋“œ +ํ•ด์ฒด +๊ฐ•๋„ +ํ˜•์ƒ +๋Œ€์ƒ์ž +์ถ”์  +๋‚ด๋ ค๊ฐ€ +์ˆ˜๋ถ„ +##์ˆ˜์ˆ˜ +ํ•ด์งˆ +79 +๊นŒ๋‹ญ +๊ทธ๋ถ„ +์Šคํ‹ฐ +์Šค๋Ÿฐ +ํƒœ๊ตญ +์–ต์ œ +๋™๊ตฌ +๋™๋ถ +๊ฒฝ์ฐฐ์ฒญ +์ƒํ™˜ +์–‘์ž +์‹๋ฏผ์ง€ +์‚ฌ์ด๋ฒ„ +##์•„๋ฆฌ +๋ฐ”์ด๋Ÿฌ์Šค +ํŠน์œ  +์˜ˆ์™ธ +๋น„๋กœ์†Œ +๋Œ€๋Ÿ‰ +๋ฌด์–ธ +ํ˜ธ์ˆ˜ +์„ฑ์ทจ +์ผ๋ณธ์ธ +์œก์ฒด +๋˜๊ฐ€ +์ฆ‰๊ฐ +##ak +์™œ๊ณก +๊ด€ํ•  +๋งค๋‹ˆ +์‹œ๊ธ‰ +์˜คํ”ผ์Šค +##ํฌ๋ฅด +##ํ†จ๋ฆญ +์ปจ์„คํŒ… +์œผ๋ฆฌ +๋ด๋„ +์™„๊ณต +๋„์•ฝ +์ด์ข… +๋งˆ๋ผ +87 +๋Œ€์ฒด๋กœ +์ค‘ํ™” +ํ•ด์ œ +๊ทœ๋ช… +์—ญ์ž„ +##๋ฆฌ์ฆ˜ +ํƒ‘์žฌ +๊ธฐ์žฌ +FI +ํ„ฐ๋ฏธ +๋ชจ๋‹ˆํ„ฐ +์ฒ ์ €ํžˆ +ํ—ˆ์œ„ +์ด์  +1988 +์–ธ์   +##uc +๋น„ํƒ€๋ฏผ +์••๋„ +##๋ฐ”์ด +202 +์›Œ์‹ฑํ„ด +์‹ฌํ™” +๋ง‰๋Œ€ +์ƒ๋‹น์ˆ˜ +##๋ฆฝ๋‹ˆ๋‹ค +๋นŒ๋”ฉ +##eg +์š•์‹ฌ +๋‘๋ ค์›€ +๋””์Šคํ”Œ๋ ˆ์ด +์‹ ์„ธ๊ณ„ +##all +์ฒ˜์ง€ +๊ฐ์ƒ +๋ฌด๊ด€ +๋ณธ์„  +๊ฐ„์ฃผ +96 +์Šค์œ„์Šค +์šด์†ก +ํŒ์ • +ํƒ„๋ ฅ +์˜ฎ๊ธฐ +๊ณต์‹œ +์กด๊ฒฝ +##์ ธ์•ผ +๊น€๋™ +๊ตฌ๋ฉ +##๋กœ์ง€ +๋…ธ์˜ˆ +๋‹นํ™ฉ +##์–ด์งˆ +##ip +##ram +์‹ ๋ฐœ +์ด›๋ถˆ +๋‹น์ผ +๋ด…๋‹ˆ๋‹ค +ํ•ด์ง„๋‹ค +๋ฐฉํ•™ +์ž์‹ ๊ฐ +๊ฐ€ํ•ด +๋…์žฌ +๊ฟˆ๊พธ +์บ ํผ์Šค +์„ ๊ฑฐ๊ตฌ +์„ฑํญ +##๋„ค์‹œ์•„ +์ถฉ์ „ +๋‚ด๋ฉด +์˜ค๊ฐ€ +##ํ•˜์šฐ +์›์ž๋ ฅ +๋ฒ„์ „ +๊ณ ๋ฐฑ +co +๋‹น๋‹น +ํŽ˜๋ฅด +๊ผฌ๋ฆฌ +์‹œ์ƒ +๋ถ€ํ•˜ +๊ด‘์ฃผ์‹œ +์ฒญ๋ฌธํšŒ +ํด๋ผ +์ง€์ง€์œจ +์ „์—ญ +์ˆ˜๊ธ‰ +97 +๋Œ์•„์˜จ +##๋ ค์„œ +๋น„๋ก€ +##ob +ํšŒ์ „ +ํ† ๋ก ํšŒ +์ฐธ์‚ฌ +๋†“์—ฌ +๋“ค๋ฆฌ +์–ด์šฐ +์žฌํŒ๋ถ€ +๊ณ ์ˆ˜ +##์‚ฌ๊ณ  +๊ฐ€์ž…์ž +CC +๋น ์กŒ +์›๊ณ  +##๊ธด๋‹ค +EU +์ƒ๊ณต +์ฐฉ์ˆ˜ +์ตœ์  +ํฌ๋„ +๋ชจ๋ž˜ +๊ธˆ๋ฐฉ +์‹ ๋น„ +ํ„ฐํ‚ค +##์–ด๋†“ +์˜๊ฒฐ +๊ฐ„ํ˜ธ +๋ฎค์ง +์•ž๋‘” +๊ฐ€์† +๋„˜๊ฒจ +๊ณผ๊ฐ +์‹œ๊ณจ +์ค‘์  +##์ง‘ํŠธ +์—ฌ๊ฐ +00 +์ด๋„ +์œ ํ•ด +์–ด๋ ค์šธ +์™ธ๋ฉด +์šฐ๋ฆฌ๋‚˜๋ผ +๋ฐฉ์˜ +##๋„ˆ์Šค +๋ฉ‹์ง„ +๊ฐ€๋ฌธ +๋‚ด์ˆ˜ +์žฌ๋‚œ +์„ ๋ณด์˜€ +์ด์ง‘ํŠธ +##ang +91 +๋ฏธํ„ฐ +๋ฌธ์˜ +##ํƒ€์ธ +์ž์„ธํžˆ +ํ•œ๊ธ€ +๊น”๋” +1989 +ํฌ์Šค +##ine +๋ณด์ „ +์˜คํŽ˜ +์ค„๊ธฐ +ํƒ€์ดํ‹€ +##tt +์•„๋ฆ„๋‹ต +##ok +๋ฐ•์ˆ˜ +##ub +๋†์‚ฌ +๋™๊ณ„ +๋ผ์ดํ”„ +์ฆ๋Œ€ +๊น€ํ˜„ +๋น„ํ•˜ +์ถœ์žฅ +๊ทธ๋‚˜๋งˆ +์„œ์ˆ  +์œ„์„ฑ +๊ทน๋Œ€ +์†Œ์Œ +๋‚˜ํƒ€๋‚œ๋‹ค +๋ฆฌ์กฐ +์น˜ํ‚จ +ํ”„๋ผ +๋Œ€์™ธ +์ฆ๊ฒ +๊ธ€์ž +๋‘˜๋Ÿฌ์‹ธ +ํšŒ๊ด€ +ํ„ฐ๋ฏธ๋„ +๋…ธ๋ฆฌ +์‹ ์žฅ +๋งค์น˜ +ํ•„๋ฆฌํ•€ +์ฃผ์†Œ +ํŠน๋ณ„ํžˆ +๋“ค์–ด๊ฐˆ +์˜ค๋žœ๋งŒ +๊ด€์ ˆ +์„ค๋ฌธ +๊ตญ๊ฒฝ +##๋‹ค์ˆ˜ +๊ฐ–์ถฐ +๊ฐ•๋ฆ‰ +ํŠธ๋ Œ๋“œ +์ง€๋ถˆ +๊ณ ์‚ฌ +๋Œ€๊ธˆ +ํ’€์ด +๊ณ ๋ฅด +์•„์‹œ์•ˆ +##๋ณธ๋‹ค +๊ธฐ๋Œ€๊ฐ +##art +๊ฑธ๋ ธ +๊ณต์ง +##์‹œ๋งˆ +##our +ํ—ˆ๋ฝ +์ œ์กฐ์—… +๋จน์ด +##์ €ํžˆ +์—ฐ์ƒ +๊ตฌ๋ฆ„ +์ฑ„์›Œ +์†Œ๊ธˆ +##๋‹จ์ฒด +์‚ด์•„๋‚จ +83 +์‚ฐ์ฑ… +์ค€๊ณต +์›๊ฐ€ +๊ณต์—… +IM +##๋ž‘์ด +์ง„์ƒ +ํ•œ๊ฐ• +์–ธ์  ๊ฐ€ +์œ ์—ฐ 
+89 +์‹ค์‹œ๊ฐ„ +ํ˜๋ฆฌ +๊ฒฝ๋กœ +##๊ฐ€ํฌ๋ฅด +๋†์žฅ +์•Œ๋ ‰ +MB +๊ธฐํ˜ธ +ํ•œ์ˆจ +์—ฐ์•  +์žฌ๋ฐŒ +##์˜์ž +##๋ฒˆ์— +์—ฐ์˜ˆ์ธ +ํƒ๊ตฌ +๋™์ฐธ +์ง์ž‘ +์ž๋ฐœ +์˜คํ†  +๊ทธ์•ผ +์œ„๋กœ +์•„๋‚˜ +์Šค์ฝ” +์ค‘์‹œ +์‚ฌ๋ƒฅ +์šฉ์„œ +๊ฑด์ „ +ํ•œ๋ฏธ +์ „์‚ฌ +์‹ฑ๊ฐ€ํฌ๋ฅด +๋…์  +๊ธฐ์•„์ฐจ +##๊ด€๋ฆฌ +๊ธˆ๊ฐ• +์ฐฉ์šฉ +ํ–‰๋ณด +์ˆ˜์ฒœ +๋Œ€์› +ํ•˜์–€ +์ผ๊ณฑ +##ํŠธ๋กœ +๋ฐ˜๋ฐ• +์ž‰๊ธ€ +์ค‘๋… +์ตœ์ƒ +์ƒ์Šน์„ธ +92 +์‚ฌ์—…๋น„ +์œ ์šฉ +์ง€๋ถ€ +์‡ผํ•‘๋ชฐ +์žก์•„ +์ถœ๊ฐ„ +์†Œ์ง€ +์ง€์นจ +๋ณด์œก +์‚ฌํ›„ +๋ถ€์‚ฌ์žฅ +์ฆ๊ฑฐ์›€ +๊ผผ๊ผผ +##ํ‚จ๋‹ค +ํ•ธ๋“œ +์›Œํฌ +๊ท€์—ฌ +์ฝ”๋„ˆ +ํ˜ˆ์•ก +ํฌํŠธ +์‹๋‹จ +๋„๋ฏผ +์—ฐํ‰ +์ž…๋‹ˆ๊นŒ +ํญํ–‰ +ํƒˆ๋ฝ +ํƒ€์ด์–ด +์ธ๋ฐ์š” +1987 +์ž์กด +์–‘์ชฝ +์„ฑ์‹ค +๊ฒฐ์„ฑ +##์Šคํ‹ฐ๋ฒŒ +์ผ€์–ด +ํŽ˜์Šคํ‹ฐ๋ฒŒ +๋‹ต๋‹ต +์•ˆ์–‘ +์ด์–ด์งˆ +๋ชจ๋ฅผ +๊ฐ€์ „ +์‹ ์ž„ +์—ฐ์ธ +๋‹น์› +์•„๋ผ +๋‹ค๋ฅผ +ํ˜•ํŽธ +๋ถ€์ž‘์šฉ +๋‹ฌ์•„ +๋ฉด์„ธ +์ฃผ๋ฅ˜ +์ƒ์ž +์ง€์ธ +๊ณ ์ง€ +๊ณ ์‹œ +##์ ธ์„œ +๋ฌด๋ ฅ +๊ฑฐ์นœ +๋ด์š” +์†Œํ™˜ +์‚ฐ์—…๋‹จ์ง€ +94 +์•ž์„ธ +์ผ๊ฐ +์ฃผ์—ฐ +์ •๋ฉด +Pr +์ƒ๊ธˆ +๋ฐ์ผ๋ฆฌ +ํ‘œ๊ธฐ +์ฐจ๊ธฐ +ํŠน๊ฒ€ +๋Œ์–ด์˜ฌ +๊ณต๊ณ  +์ธ๋„๋„ค์‹œ์•„ +๋ถ™์ด +ํŠนํ™” +##์•„์ด +๊ธฐ์˜จ +์Šค์Šน +๋ชจ์ˆœ +์• ์ • +๋…ธํ•˜์šฐ +##์†Œ๋“ +์˜ˆ๋Šฅ +์‹œ๋‚˜ +์˜คํŽ˜๋ผ +์š”์› +๊ฑด๊ฐ€ +๋ถ€๋“œ๋Ÿฝ +์Šค์›จ๋ด +##ud +์ž…์‚ฌ +250 +๋– ๋‚œ +๊ทธ๊ฐ„ +๋ถ€ํŒจ +##๋“ ์ง€ +์‹œ์ง‘ +๋ถ€๋ฆฌ +##์ผ๋žœ๋“œ +##TE +ํšŒ์ˆ˜ +๋งˆ์‚ฐ +ํ–‰์ •๋ถ€ +ํ‰ํƒ +์˜ฌ๋ฐ”๋ฅธ +๋†์‚ฐ๋ฌผ +์‚ฌ๋ผ์กŒ +๊ฐ€ํ†จ๋ฆญ +๊น€์žฌ +๊ฒฉ๋ ค +๋ถ€๋ฅธ๋‹ค +์ƒ์‹ +์ธ๋”” +๋จผ์ง€ +##๋ฐ”์ง€ +๊ฐ์˜ค +๊ฐ€๋งน +์šด์ „์ž +์•ˆํƒ€๊นŒ +๋‚œ๋‹ค +๋ฆฌ์–ผ +์‚ฌ์šฐ +๊ธ‰์‹ +๊ฐˆ๋ผ +##๋ฐ›์นจ +๋ฆฌ์กฐํŠธ +์ŠคํŽ˜ +๋”๊ตฌ๋‚˜ +์šฉ์‚ฐ +ํ•ญ๊ณต๊ธฐ +๋ฐฐํ„ฐ๋ฆฌ +์‹œ์ค‘ +์ฐธ์œผ๋กœ +##์ด์ฆˆ +ํ–‰์ง„ +๊ถŒ๊ณ  +๊ฒŒ์‹œ +๊ตญ๋‹น +๋ถ„์‚ฐ +๋ถ๋ฏธ +์ˆ˜๋ฉด +์ง€๋‚ธ +์•ˆ๋™ +##๋Š๋‹˜ +๋ผ์„œ +##๋‘˜๋Ÿฌ +์ฆ์–ธ +ํŠน์ด +์•„์ดํฐ +๋„๋• +๋ฏธ์น  +##๋ ‡๊ฒŒ +๋น„๋Œ€ +๊ธˆํ˜ธ +๋ฌด์„  +๋นจ๊ฐ„ +๋™์„œ +๋Œ์•„์™€ +์ผ์–ด +์ •๋ฐ€ +์ด๋ชจ +์ถœ์ƒ +๋‹นํ•ด +๋†’์˜€ +์ด๋“ฌ +์–ด๋ฅด +ํ•ด์ˆ˜ +kg +๊ทธ๋ฆฌํ•˜์—ฌ +์‹ ์‚ฌ +์‚ฌ๋ฌด์†Œ +์›์ฒœ +์—ฐํœด +๋งˆ์ณค +๊ฐ์ž +์‚ฌ๋ฆฝ +๋‚จ์•„ +๋›ฐ์–ด๋‚˜ +์Šน๊ฐ +๋‚จ๊ฒผ +๋Œ“๊ธ€ +์Šฌ๋กœ +๋งค๋„ +์‚ฌ์ž +93 +์ฐฝ๋ฆฝ +ํ•˜๋Š๋‹˜ +ํƒ€์›Œ +์Šฌํ”” +์ฒ ๊ฑฐ +๋งฅ๋ฝ +๋Œ์•„๋ณด +๋ชจ์‹œ +๋‚ ๊ฐœ +ํƒœํ’ +๊ฑธ๋ฆฐ +ํŒจ๋Ÿฌ +ํƒ€์šด +name +์˜ˆ์˜ +IS +์ฆ๊ถŒ์‚ฌ +๋ณ‘๋ ฅ +๋˜์ฐพ +๋„˜๊ธฐ +๋น„์„œ๊ด€ +๋„์ฒญ +๋ˆ„๋‚˜ +์–ผ์Œ +๋ชจํ„ฐ +th +๋‹จ๋ฐฑ +##NA +์ค‘๊ณ„ +##๋ฏธํ„ฐ +๋‹ฌํ•œ๋‹ค +์ผ์ง€ +๋ฐœํšจ +๋‚˜๊ฐ„ +์ฒญ์‚ฐ +์ธ์ฒœ์‹œ +๊ฐ€์ด๋“œ +ํ”ผ๋กœ +๋ฐ•์ •ํฌ +9000 +์žฅ์ฐฉ +๋‹๋ณด +์‹œ๊ณต +๋น„๊ทน +๋Œ€ํ•ญ +๊ทธ์•ผ๋ง๋กœ +ํ”„๋ฆฐ +๊ฐ€๋งŒํžˆ +์ด๋“ฌํ•ด +๊ธฐ์šธ์ด +์ตœ์•… +๋ช…์‹œ +ํ•ญ์˜ +์ƒ์ผ +ํด๋ฆฐ +๊ฑฐ์šธ +์„œ์„œ +๋„์„œ +์„ฑ์งˆ +๊น€์ง„ +์‚ฌ์ฐฐ +ํ”Œ๋žœ +๋น„์‹ผ +##ใ… ใ… ใ… ใ…  +๋งŒ๊ธฐ +๋‹จ์› +์œ ์ „ +187 +๋ฐฑ๋งŒ +186 +๋ฐ”๋‹ท +##์›Œ๋“œ +๊น€์ƒ +์Šคํƒ  +##๋ฐฐ์šฐ +ํ˜‘๋™ +๋“œ๋ฆผ +์™•์กฐ +๋ถ€์‚ฐ์‹œ +์ด์žฌ +๋ช…๋ฐฑ +์‹คํƒœ +๋ƒ‰์žฅ +์‚ฌ์ธ +์นœ์ ˆ +ํŽธ์ž… +๋ชฝ๊ณจ +##ev +๋ฉ”์ดํฌ +๋Š˜์–ด๋‚ฌ +๋Œ€๋‹ค์ˆ˜ +๋„๋ฐœ +๊ธ‰์† +์„ธ๋ฏธ +๊ฑด์ง€ +ํŽ˜์ด +ํ™”ํ +๋ฌผํ’ˆ +์šฉ์ง€ +##๋ฃจ์…˜ +๋ณด์กฐ๊ธˆ +์•„ํ”ˆ +๋‹ค๊ฐ€์˜ค +์ด๊ฒจ +๊ทธ๋ผ +๋ˆ„์  +๊ฑธ๋กœ +๋‚ด๊ฐ +์—ฌ์˜๋„ +์กฐ๋ฆฌ +๋”ฐ์ง€ +์ด๋งˆํŠธ +ํ•œ์ฐฝ +๊ฐ€๋ ค +๋ˆ„๋ฅด +๊ณจํ”„์žฅ +๊ณ„์Šน +์‹ค์ข… +์‹ฌ์ • +๋จธ๋ฆฟ +๊ฐ€๋ฅด์ณ +๋์œผ๋ฉฐ +๋‚ด์ง€ +์–ด๋ฆด +์‹ค์ฒด +์„ธํƒ +##์–ด๋‚ด +๊ฐœ์†Œ +๋ˆˆ๋น› +์‚ฌ์—ฐ +์ฝ”ํŠธ +์ด๋Ÿด +์Šคํ”„ +๋งˆ์ผ“ +์ธํ…Œ +๊ฐ•๊ฒฝ +์–ด๋จธ +์ผ๋ฐ˜์ธ +์‹ค์šฉ +์กฐ์šฉํžˆ +์ฃผ๋จน +##igh +๊ด€ํ–‰ +๋ฉด์—ญ +ํ”Œ๋ž˜ +๊ธฐ์กฐ +์น˜์œ  +๋ผ๋‹ˆ +๋ฐ”๋ณด +ํ—ค์–ด +์‹ค๋ฆฌ +๋‚ฎ์ถ” +๋ฌด๋ค +๋‹ค๋ฃฌ +์˜ˆ์œ +##๊ฐ€์šด +์ด๋ฃจ์–ด์ง„ +##๋“ ๊ฐ€ +##ail +๊ณผํ•™์ž +๋‹ฌํ–ˆ +์ „๋‹ด +์ค‘๊ตญ์ธ +ํ™•๋ฆฝ +์ฃ„์†ก +๋Œ€์ฃผ +##๋งˆํฌ +์–ด๋ฅด์‹  +์™•์ž +๋‚ฉํ’ˆ +ํ•ฉ์ณ +##ํ’€์ด +ํ™”ํ•ฉ +๋„ค์ด๋ฒ„ +์ฑ™๊ธฐ +03 +์„œํ•ด +๋‚ฉ๋ถ€ +๋”ฐ์œ„ 
+๋ฐฑ์ œ +๋‚˜๋ž€ํžˆ +์ตœ์‹  +##๊ตฌ๋ฆฌ +๊น€๋Œ€์ค‘ +์ „์  +๋””์ž์ด๋„ˆ +์ค„์–ด๋“  +์ž‡๋”ฐ๋ผ +๋‹จ์ฒด์žฅ +์ผ์š”์ผ +##์–ด๋ฆฌ +##์ฒ˜๋Ÿผ +์ฒ ์ˆ˜ +์ œ๊ณ  +์ž์‚ฌ +๋Œ์ž… +์ž…์› +์ฝ”์Šคํ”ผ +IP +์‹ ์„ฑ +์ตœ๋‹ค +์ฐจ์žฅ +์น˜๋Ÿฌ +์ž‰๊ธ€๋žœ๋“œ +๊ด€์—ฌ +ํ•™๋ ฅ +1950 +GM +๋ถ„ํ•  +์ถ˜์ฒœ +๋ฌด์–ธ๊ฐ€ +##๋Š๋ฆฌ +์ฆ๊ฑฐ์šด +์กฐ๊ตญ +๊ฑฐ๋ž˜์†Œ +์ƒ์„ฑ +ํƒ์›” +๋ณด์ธ +ํฌ๋ฃจ +๊ณค๋ž€ +##ik +๋ฌด์‚ฐ +ํŽ˜์ด์ง€ +๋…น์Œ +์• ์ดˆ +์•„๋ฆ„๋‹ค์›€ +๊น€ํฌ +๋ถ„๋‹น +ํ•œ๋„ +๊ฐ์ถ” +๋’ท๋ฐ›์นจ +๊ตญ์„ธ +๋ชจ์€ +์ƒ์ƒ +๋ช…์ ˆ +ํ•จ์œ  +์•„ํ”” +์•„์Šค +๊ฐ€๋ฆฌํ‚ค +##๋ฒ„ํŠธ +ํ˜„์—ญ +์„ฑ์„œ +์ธ์ข… +๋‹จ๋‹จ +์ˆ˜๋ ด +##ํ‚ค์ง€ +์œ„์ถ• +์œ ๋ž˜ +ํ›„ํšŒ +์ฃผ์žฌ +์šฉ๋„ +์ฑ„์†Œ +์ถœ์„ +์ฆ๊ฒจ +์ถ”๋ชจ +์ „๋ฌด +์ฆ์ง„ +์œก์ƒ +๋‹จ๋ฐฑ์งˆ +##๋ฐ”๋žŒ +์•„์‰ฝ +์œ„ํƒ +05 +์ผํ–‰ +ํƒœํ‰์–‘ +๊ฑฐ์  +์ฑ…์ƒ +##์žก์ด +ํฅ๋ถ„ +๊ตฌ์„ +๊ณ ์ง‘ +๋‡Œ๋ฌผ +์›”๊ธ‰ +์ฑ„์šฐ +๋งŒ๋งŒ +##๋ฐ๋ฏธ +๋“ค์–ด์˜จ +์ ‘์† +ํญํƒ„ +๋ฉ€ํ‹ฐ +์™ธ๋ชจ +๊ณ ์ถ” +๋ฐฉ์น˜ +์ฒœ์–ต +ํ•œ๋‘ +์†”์งํžˆ +๊น€๊ฒฝ +์ฒ ํšŒ +ํŒจํ‚ค์ง€ +๋”์šฑ์ด +๋ถ€๋“œ๋Ÿฌ์šด +##์‹œ์Šค +mm +์ฐจ๋ผ๋ฆฌ +์‚ฐ๋ฆผ +๊ตญ๋ฉด +์ฃผ์ € +๋†“์น˜ +๊ณจ๋ผ +์œ ํ˜น +์•„๋ฒ  +๊น€๊ธฐ +๋ถ€๊ฐ€ +๋ชจ๋ฅธ +๋ฆฌ์ŠคํŠธ +๊ต์„ญ +ํ”๋“ค๋ฆฌ +##โ”€โ”€ +์„œ๊ธฐ +##๋Œ€๋ฌธ +๊ฒจ๋ƒฅ +๋“œ๋Ÿฌ๋ƒˆ +์ด์ˆ˜ +์ƒ์† +๋ถ€์  +์ฑ…์ž„์ž +์—ฌ๊ถŒ +๋‹น๋ถ„ +์žฅ๋ก€ +๊ณจ๋“œ +ํƒœ์–‘๊ด‘ +์•„์นด๋ฐ๋ฏธ +or +๋ฒ„๋ ค +##๋Ÿฌ์ ธ +๊ถ๊ทน +์„ฑ๊ฒฝ +NC +์ •ํ†ต +ํ”ผ๊ณ  +์˜๋ฅ˜ +##ich +์„ ๋ณด์ธ๋‹ค +๊ฐค๋Ÿฌ๋ฆฌ +์ฐจํŠธ +๊ธฐ์  +์–ด๋ผ +๋ฒŒ์–ด์กŒ +์‹ฌํ•ด +๋น„์‹ธ +๋ฐœ๊ธ‰ +##๊ฐ€๋ฃจ +์Šน๊ฐ• +์ ˆ์‹ค +์ด์น˜ +ํ–ฅ๊ธฐ +๋ฐฐ์šด +์œ ์  +๊ฑด์˜ +๋ฉ”์ดํฌ์—… +์—ฐํ•ฉํšŒ +##ํ…Œํฌ +900 +๊ทธ๋ฆผ์ž +๊ธฐ์ˆ ๋ ฅ +์›๋ฃŒ +ํŒ๋งค๋Ÿ‰ +์ธก๊ทผ +๋‹ด๋‹น์ž +##๋“œ๋กœ +์—ฐํŒจ +์ ˆ๋ง +##์–ด๋„ฃ +ํฅํ–‰ +์‹ค์  +๋Œ€์ง€ +๊ฐ์ถ• +์ฒญ์ฃผ +๋ฑ…ํฌ +ํ•ฉ์„ฑ +##์„ฑ์žฅ +์ƒ์•  +๋น›๋‚˜ +##๋จผ์ง€ +์ฃผ์‚ฌ +๋‚ด๋ฐ€ +์ ์ • +์ด์˜ +์žฌ์› +##ue +์ŠคํŠœ๋””์˜ค +๊ณ ์กฐ +์ƒ์˜ +์ˆ™๋ฐ• +์ธํ…Œ๋ฆฌ์–ด +์ž์นซ +์ˆœ์ด +ํ†ต์žฅ +ํ‘œ๋ช… +๋งˆ์Šคํ„ฐ +๋†€๋ผ์šด +์‹œ์„ธ +๊ฐ€๋ผ +์ „์‹  +์ด๋ฅผ +์žฌ์ง +๊ฐ•์„ธ +๋ฒ”ํ–‰ +FA +์˜ค์ผ +๋ฏผ์‹ฌ +์ƒˆ๊ฒจ +์‚ผ๊ตญ +์„œ์„œํžˆ +์ผ์ฒด +์™€์ด +ํ˜ธ์‘ +๊ฐœ์‹œ +ํ‘์ธ +ํ”ผ์•„๋…ธ +์Šคํฌ๋ฆฐ +๋ฏธ๊ตญ์ธ +ํฌ๋ฆฌ์Šค๋งˆ์Šค +์•„๋ฆฌ +๋‚˜์˜ +๊ธฐ์ฐจ +๋ฒŒ์–ด์ง„ +##DP +์ด์•ผ๋ง๋กœ +์ฐพ์•„๋ณด +1986 +์žฌ๊ฑด์ถ• +๊ฑด๊ตญ +pp +##์šฉ์ฐจ +์ถ”๋ฝ +๋‚˜์•„ +์ธ๊ฒฉ +์–ด๋ฆฐ์ด์ง‘ +์›์œ  +ํด๋ฆฌ +์žฌ์ •๋ถ€ +ํ† ์š”์ผ +๋ช…๋ถ„ +๋ถ€๊ทผ +์กฐ๋งŒ +ํŽผ์ณค +์ฃผ๋ฐฉ +์•„๋ž +Un +##ran +01 +๋ณ‘ํ–‰ +์•ก์…˜ +๋ฐ”๋‘‘ +ํ•ด์„ค +๋‚ด์ • +##์Šค๋ ˆ +๋ฒˆ์ง€ +์ „๋ผ +๊น€์ผ +์ธํ˜• +์–ด๋–จ +##๋™๊ฐ• +์•„์ผ๋žœ๋“œ +##ttp +๋ณด์ขŒ +์ €์†Œ๋“ +์ด๋ค„์กŒ +์œŒ๋ฆฌ +๋“œ๋ž˜ +๊ณต์„ธ +์ ‘ํ•˜ +##๋‘ฅ์ด +##ํ‹ฐ๋‚˜ +##๋ฐ์ด +์†Œ์› +์˜ˆ์ปจ๋Œ€ +์ƒ๊ธธ +ํŽธ์˜์  +์‹œ์‹  +์šฐ์œ„ +ํฌํ„ธ +http +์ฐฉ๊ณต +์›ํ™œ +์Ÿ์  +##์˜คํ”„ +##ort +์ผํ™˜ +์ฒœ๋ฌธ +ํ”Œ๋กœ +์‚ฌ๋ฌด์ด์žฅ +๋‚ด๋ณด +์ถœํŒ์‚ฌ +๋†€๋ž +๋– ์˜ฌ๋ž +์˜ค๋ธŒ +๋น ์ ธ๋‚˜ +์ง€๋ถ• +์•ผ๊ฐ„ +์—ฌ์‚ฌ +๋ถ„ํฌ +๋ฐœ๋ผ +##if +์•ผ์™ธ +๋…ธ๋ฅด +๋ฐœ๋ชฉ +๋Œ€๋ฐ• +ํ‰๋ก  +##์ œํ’ˆ +์„ธ์ œ +ํด๋ž€๋“œ +ํ•œ๋™์•ˆ +๋„๋ชจ +๋ฐ”๋€ +์‹œํŠธ +๋ฐœ์ฃผ +ํ‡ด๊ทผ +์ฐฝ๋ฌธ +์ƒˆํ•ด +๋˜์ ธ +์„œ์‹ +๋ชป์ง€ +์„ธ์ผ +์–ธ๋• +์ •๊ทœ์ง +์ทจํ–ฅ +์˜ˆ์ˆ ๊ฐ€ +๊น€์šฉ +๋ฌธ๊ตฌ +ํ…์ŠคํŠธ +๊ณ ์Šค +์ง€๊ฒฝ +์ˆœ์ฒœ +ํ›„์† +๋‹น๋‚ด +์ฐฝ๊ณ  +๋‹คํ–‰ํžˆ +๋ช…ํšŒ +๋ฐฐ์ • +๋…ผ์ˆ  +์„ ํ–‰ +์‹๋Ÿ‰ +ํ•œ๊ตญ์–ด +์ด๋ฃจ์–ด์ ธ +##์‚ฐ์‹œ +๊ต์ • +์˜๊ฐ +์ถ•์  +๊ณต์ต +##๋‹ˆ์ฆ˜ +์ž๋ถ€ +##์ž„์Šค +##ํ˜„์ง„ +ํž˜์ž… +ํ™˜๊ฒฝ๋ถ€ +์ฝœ๋ผ +๋ฒŒ๊ธˆ +๋…ธ๋™๋‹น +์ง์žฅ์ธ +##ํ•ด์•ผ +##๊ตฌ์—ญ +์˜ค๋ฅ˜ +๊ต์ฐจ +ํ”„๋ผ์ด +๋‚จํ•œ +๋ˆ„์›Œ +๋ฉ”์ธ +๋ฌด์ง€ +์‘๊ธ‰ +๋…ธ๋™์กฐํ•ฉ +์ˆจ๊ฒจ +์กฐ์„ ์ผ๋ณด +๋ฐ”ํ€ด +์—ด๊ธฐ +IB +##๊บผ๋ฒˆ์— +ํ•œ๊บผ๋ฒˆ์— +##๋ฆฌํฌ +์ด๋ผํฌ +##์…”๋„ +๋ฐ˜๋ž€ +๊ตฌํ˜ธ +์ ˆ๋Œ€๋กœ +์ˆ˜ํ—˜ +๊ด€์žฅ +๊น€์—ฐ +๋™๋‚จ์•„ +๊ทธํ•ด +์ดˆ์›” +์–ต์šธ +๋”๋ธ” +๊ฐ„ํŒ 
+์•ž์„  +์˜ค์‚ฌ +๊ณ ์žฅ +์–‘๋… +๊ดด๋กญ +ํฌ๋กœ +๋‹จ์ถ• +๋„ฅ์„ผ +NH +๊ณ ๋ฌด +##ate +๋Œ€ํ•ด์„  +##ge +๋‹ค๋…” +Al +##ork +์•ˆ๊ฒฝ +์ˆ˜์†Œ +ํ„ฐ๋„ +๋น„ํŠธ +๋ฌผ๊ณ ๊ธฐ +๋งˆ๋ฅดํฌ +์ˆ˜๋‹น +ํŠธ๋ ˆ์ด +์–ต์•• +์Œ์ฃผ +๋น„์น˜ +์ •๋ง๋กœ +Com +์†”๋ฃจ์…˜ +๋“ค์—ฌ๋‹ค๋ณด +์ž์ œ +๊ฒฐํ˜ผ์‹ +๋’ค๋Šฆ +์žฌ๊ณ  +์ „๊ต +์„œ์ดˆ +์œ ํšจ +์žฅํ•™๊ธˆ +๋ฒจํŠธ +ํ•˜๋“œ +๋‚˜์„ ๋‹ค +๋‚ ์นด +์Šคํƒ€ํŠธ +๋ฉด์„ธ์  +๋˜๋ฐ +๋ณ„๋‹ค๋ฅธ +์€๊ฐ€ +์œผ๋Ÿฌ +##๋ถ€๋ฅดํฌ +์กฐ์ข… +์˜์ƒ +๋›ฐ์–ด๋“ค +##๋Ÿฌ๋“œ +์ƒ๊ณ„ +์ฃผํ•œ +๋ช…๋ฌธ +๋ฐœ๋ช… +๋ชฐ๋ฆฌ +๋™์•„๋ฆฌ +๋“ฑ์ง€ +์•„๋ฉ”๋ฆฌ์นด +LED +๊ฒฌ์ œ +์ค‘๊ฐœ +๊ณ„๊ณก +๋น„์œ  +์ถ”์ธก +๋…๋„ +์™ธ์น˜ +์žํšŒ์‚ฌ +๊ฑด์ˆ˜ +ํƒ€์ž… +1985 +๋ณ€ํ˜• +๋Œ€๋‹จํžˆ +##ne +##ํ† ๋ก +์ž…๋‹จ +๊ตญ๋ฌด์ด๋ฆฌ +์ƒ์ฃผ +์—ผ๋ ค +๋ถ€์ฒœ +ํ”ผ์ž +1500 +๋งŒ์ฃผ +์–ด๋ ค์›  +๋‹น๋ถ„๊ฐ„ +๋ฏธ์„ธ๋จผ์ง€ +๋”๊ตฐ์š” +Th +์†ก๋„ +์กฐ๋งŒ๊ฐ„ +๋‚ญ๋น„ +์ž˜๋ผ +๊ณต๊ธฐ์—… +ํ”ผ๊ณค +์กฐ์ƒ +๋“ฑํŒ +์Šค๋Ÿฌ์›Œ +์‘์‹œ +์ธ์‡„ +์ค‘๊ณ  +๊ด€๋žŒ๊ฐ +##ong +FIFA +์ฃผ๋ฆ„ +์ „์ˆ  +๋ฒ„๋ฆด +์ฐจ์„ธ๋Œ€ +์œ ๋ฌผ +##๋Ÿฌ์›Œ +๋“œ๋ ธ +์˜จ๋ชธ +130 +์ œ์ž„์Šค +๋ฆฌ์Šคํฌ +##๋Ÿฐ์Šค +๊ณต๊ตฐ +##ment +๋ฐฉ์œ„ +๋‚ธ๋‹ค +์ปคํ”Œ +์—ฌ์‹  +๊ฐ•ํ–‰ +##์–ด๋ถ™ +์ž์กด์‹ฌ +์ดํƒœ +์ˆ˜์‚ฐ +๊ฐœ์žฅ +์ขŒ์ ˆ +์ฒœ์žฌ +์†”๋กœ +##๋ฒ ์ด +ํฌ๊ด„ +๊ฐ„์‹ +์กฐ์„ธ +๊ฐ์ˆ˜ +์žฌํ˜„ +์ข…๋กœ +๋‹จ์  +์–‘์‹ฌ +๋Œ€์กฐ +๋ถˆ๊ฑฐ +๊ฒŒ์ด +๋จธ๋ฆฟ์† +์ง€์„ฑ +##๋ฒ„๊ทธ +๋™์„ฑ +์‚ฌ๊ฒฉ +๋‚จ๊ธด +๋„์ €ํžˆ +์ •๊ต +์„ฑ๋‹น +์‹œ๊ฐ€ +์ฐจ์งˆ +์‹œ๋‚˜๋ฆฌ์˜ค +##์ฃผ์˜์ž +๋“ฑ์‚ฐ +์˜์•ฝ +์ œ์–ด +์„ฑ์ˆ™ +์„œ๋‘˜๋Ÿฌ +์ƒ‰์ƒ +์šฐ์Šค +ํ‹ฐ์ผ“ +ํ›„์ฟ  +์ข…์ „ +๊ต์› +์ž„์ƒ +๊ฒฝ์ œํ•™ +New +์—ญ์„ค +##ure +๊ฐ์ง€ +US +An +๊น€์ •์ผ +๊ฐ€๋ฒผ์šด +๋‹จ๋‹ค +๋‹ฌ๋ž +๊ณ„๋ž€ +์ตœ์ˆœ +๋กœ๋น„ +๊ฐ‘์ž‘ +๊ตญ์‚ฐ +ํžˆํŠธ +๋ชฉ๊ฒฉ +์„ธ๋ฏธ๋‚˜ +๋ถ€๋„ +์†Œ์šฉ +๊ฐ์˜ฅ +์ด๋งˆ +์ค‘๊ฒฌ +๊ด€์ค‘ +์ผ์–ด๋‚  +์ผ์œผ์ผฐ +์ผ€์ด์Šค +์ด๋ฐ +๋™ํ–ฅ +๋น„๊ฒฐ +๋ถˆํ™ฉ +์œ„์žฅ +ํˆฌ๊ธฐ +์–ด์ฐจ +์–ธ๋” +๊ฑด์„ค์‚ฌ +์ €ํ•˜ +์นด์ด +ํ˜„์ง +๋‹น๋‡จ +ํ•œ์ž +์ฝ”๋ฏธ +์†Œ๋งค +์žฅ์„ฑ +1984 +๋ฏธ์ณค +๋›ฐ์–ด๋„˜ +๊ฐ€์งœ +์ถ”์ถœ +์žฌํ•™ +์—ฌ์™• +์ฒ˜๋ฐฉ +๋“ค์–ด์„  +์ข…์ผ +##OC +์ค‘๋ถ€ +๋ ๊นŒ +์—ฐ์Šน +๊ตญ์™• +์ฃผ๊ณ  +์•„์‰ฌ์›€ +์‹œ์ˆ  +๊ฐ•์•„์ง€ +ํƒœ๊ทน +์†Œ์ฃผ +๋“œ๋ฌผ +๋ฒ•๋ฌด๋ถ€ +์‚ฐ์†Œ +์ œ์‚ฌ +๋ฉด์ œ +๊ณต์‚ฐ๋‹น +์•„๋ƒ +๋‘๋ฃจ +์Œ์‹์  +์„ธ๋ จ +๋˜‘๊ฐ™์ด +ํ•œ๋ฅ˜ +์—ฌ์ฃผ +์ €์„œ +๊ทผ์› +##๊ฑธ์Œ +๊ฐ„ํŽธ +๋‚˜๋‰˜ +๊ตฌ๋‘ +ํŠธ๋Ÿญ +๊ณต๋ฐฉ +๋‚ด์™ธ +๋ชจํ˜• +์–ด์ฐจํ”ผ +๋ฐœ๊ฐ„ +๋‚ด๋‹ฌ +๋ฒ ์ด์Šค +##ial +์‹ ๋… +๋ถ€์Šค +์ด์œค +๋ฌด์„œ์šด +์™ธ๊ตญ์–ด +์˜์ •๋ถ€ +๋Š˜์–ด๋‚  +์•ฝ๋ฌผ +๊ณต์ •์œ„ +์š”์ƒˆ +๋‹ˆ๊นŒ์š” +##ํ•ตํ™” +์ง๋ฉด +๋น„ํ•ตํ™” +์ธ์ ‘ +์ค‘์ˆœ +ํ˜ˆ๊ด€ +##์„ธํฌ +์šดํ•ญ +์ถœ์ž +ํ”„๋กœ์„ธ +๋ƒ‰์žฅ๊ณ  +์‚ฌํ˜• +์†Œ์…œ +์ „์ฒ  +์ง€๋Šฅ +์ˆ˜์„ฑ +๊ฑฐ๋’€ +CD +์˜ค์ผ€ +๋‹ฌ๋ผ์ง€ +์•ฝํ™” +๊ณผ์ž‰ +์–ด๋‘์šด +์‚ฌ๋ น๊ด€ +##๊ณ ๋ฆฌ +์„ ์ˆ˜๋‹จ +110 +160 +๋„์š” +๋ธŒ๋ฆฌํ•‘ +์‹œ์ผœ์•ผ +##๋‹‰์Šค +์ฃผ์œ  +์ฒด์ธ +##se +์ง€๋ฒ• +์ฆ์„ธ +์‘์šฉ +๋‚™๋™๊ฐ• +๋ฒ”์ธ +์ง€๊ฒ€ +##์ŠคํŠธ๋ผ +์จ์•ผ +๋ชจ๋ฒ” +##์ผ์œผ +๋ถˆ๋Ÿฌ์ผ์œผ +์˜ค๋กœ์ง€ +์ฐพ์•„๋‚ด +##์•„๋‚ด +๋งก๊ธฐ +์•„์ด๋Œ +์ฒœํ•˜ +ํ™•๊ณ  +์›์ž‘ +์—ผ๋‘ +์—ฐ์„ธ๋Œ€ +์ˆ˜๋ น +ํŠน์œ„ +IMF +์ง€๋ชฉ +๊ตํ†ต์‚ฌ๊ณ  +ํ˜ธ๊ธฐ์‹ฌ +##๋œจ๋ ธ +๋ฌดํšจ +##๋“œ๋ผ +๊น€์ˆ˜ +์Œ์šฉ +์‚ฌ์—…์žฅ +์˜ค์Šค +๊ตฐ์ฃผ +๋ฏผ์ƒ +ํ–‰ํƒœ +์ •ํ•œ +๋ช…์˜ +๋žญํ‚น +๋‹คํ +์ œํœด +Con +์‹ญ์ž +๋ฐฐ์›  +์ „์ง +๊ฐ•๋ ฌ +##๊ฐ€๋ฆฌ +์ •๋ณต +์œŒ๋ฆฌ์—„ +##์•ผ๋งˆ +์–‘๋ณด +์ฆ์ธ +๋– ์˜ฌ๋ฆฌ +์ง€๊ฐ +์†Œ๋ฉธ +๋Š˜๋ ค +๋Œ„์Šค +์ฃผ๊ณ ๋ฐ› +์ฃผ๋จธ๋‹ˆ +##ber +๋‚˜์™€์„œ +๊ผฝํžŒ๋‹ค +##ํŠธ๋ผ +๋ณต์Œ +๋‹ค์ง +##NG +๋“ฑ๋ก๊ธˆ +๋ฐ์ดํŠธ +๋‚˜ํƒ€๋ƒˆ +๋‚ฏ์„  +์šฐ์ • +์œ ์กฑ +##๊ณตํ™” +์ด์‚ฐ +์˜ค๋ฅผ +๊ธฐ๋‹ค๋ ธ +๊ผฝํžˆ +์ƒ์„ธ +๋ฐœ๋ ˆ +ํ˜‘๋ฐ• +๋ฐ˜์‚ฌ +๊ฑธ์นœ +๊ฐ๋‹น +๋ฐฉ์†ก์‚ฌ +๋„˜์–ด์„œ +์›์กฐ +์†Œ๊ฐ +ํ•ญ๋งŒ +ํ„ฐ์ง€ +##๋ ˆ์ด์…˜ +์ด์ˆœ +์–‘๋„ +##ํ•˜๋‚˜ +๋ฌด์ธ +์ž…๋ ฅ +์ฐฝ์›์‹œ +๋‘˜๋Ÿฌ๋ณด +ํƒ€์ 
 +๋งˆ์•ฝ +์ƒˆ์šฐ +์žฌํ™œ +์ปค๋ฎค๋‹ˆํ‹ฐ +food +๋กœ๋งจ +์ˆ˜๋ช… +๋ชฐ๋ž˜ +ํฌ์ง€ +๊ตญ์  +์ง€๋‚ฌ +๋ฐ”์ง€ +๋‹ˆ๋‹ค +๋ฒ”์ฃผ +de +๋ฒ ๋ฅผ +ํ—ˆ๋ธŒ +##๋™์ƒ +์ตœํ›„ +##๊ด€๊ณ„ +์šฐ์—ฐํžˆ +์š”์•ฝ +๋ผ์ด๋ธŒ +๋Œ€์ˆ˜ +๋ถ€์ด๋ฆฌ +๋ฒ ๋„ค +์ฐฉํ•œ +์ˆœ์ด์ต +์—ด๋งค +์ž…๋ง› +##80 +์นด์šด +์•ˆ์ชฝ +๋ƒ๋ฉด +์ฝ”์Šค๋‹ฅ +1982 +๋งค๋‹ˆ์ € +๊ฐ€ํ•˜ +๋…ธ๋ฆ‡ +##์—ฐํ•ฉ +๋งˆ๋Š˜ +๋ณด๋ฌผ +๋ฌด์ฃ„ +๋…ธ๋ž€ +๋‹จํŽธ +์‚ญ์ œ +ํŒจ์Šค +๋ฅ˜ํ˜„์ง„ +##๋‹ค์ง€ +๋ธ”๋กœ๊ทธ +##์œ„์› +๋‚จ์–‘ +๋„๋ฆฌ +ํŠธ๋กœ +์•ˆ๋‹ค +์ˆ˜์†ก +์ •์„ธ +์•„๋‹™๋‹ˆ๊นŒ +๊ณ„์‹  +์ €๋„ +##๋น„๋“œ +๋ฐ˜์ง +ํ•œ์šฐ +ํ•ญ๊ณต์‚ฌ +์น˜๋งค +๋Œ๋ ค +๊ด‘๋ฒ” +์ธ์ฒด +์ค‘๊ธฐ +๋นŒ๋ฆฌ +##์˜ฌ๋กœ +๊ฒฝ๊ด€ +์œผ๋ฆฌ๋ผ +๋ณด์—ฌ์ค€ +๊ณผ์‹œ +##๋ฆฌํ‹ฐ +์„ธ์ด +๋Œ๋ ธ +##์‹œ๊ฐ„ +ํฌ์ฐฉ +๋ฌด์„ญ +##iz +๋ถ„๋ฐฐ +##์ €๊ธฐ +##์œ ๋Ÿฝ +์ƒ๋ฆฌ +์ด๋ณ‘ +์˜ˆ์˜ +๋ชจ์ด +์œ ๋ง +์œผ๋ ค๋Š” +๋‚ด์ฃผ +์›จ์ด +๋”ฐ์ ธ +๋– ์˜ค +์•ฝํ•œ +๋ณด์ถฉ +์„ ํฌ +์ค‘์„ธ +ํ™”ํ•ด +๊ฐ•์ขŒ +##ํ™”๋ฌธ +์—ฌ๊ธฐ์ €๊ธฐ +LH +ํžˆ๋กœ +##๋ ˆ์ผ +๋ถ€์–‘ +์˜์›ํžˆ +์ผ๋ถ€๋Ÿฌ +๋‚ด๋ ค์˜ค +##ain +๋ฏธ๋ฃจ +์ฒญ์ • +์œ„์•ˆ๋ถ€ +์ฒซ๋‚  +inst +๋งˆ๋ผํ†ค +##ore +๋”๋Ÿฌ +์ ๋ฆฝ +๊ฑด์ถ•๋ฌผ +์—ฌ๋Ÿ +ํ‚ค์Šค +ํ•œ์–‘ +์–ผ๋ฅธ +๊ธˆ๊ฐ +์„ธ๋ฌด +์บ˜๋ฆฌํฌ +๋ถ๊ตฌ +๊ณต๋ฆฝ +๋ถˆ๋Ÿ‰ +์žƒ์–ด๋ฒ„ +ํ•˜์ฒœ +๋Œ€์™• +์˜์™ธ +๋ชฉํฌ +์•…๊ธฐ +๊ต๋„ +์–ต์ง€ +ํ† ๋ผ +์‹ ์ƒ +##ian +์‚ดํ”ผ +์šฐ์ฆˆ +๊ทธ๋ž˜์•ผ +์˜คํ”ผ์Šคํ…” +##๋†€์ด +๋ถ€์žฌ +ํ˜ผํ•ฉ +๋Œ€๊ตญ +๋ง›์Šคํƒ€ +์บ˜๋ฆฌํฌ๋‹ˆ์•„ +๋‹จํ–‰ +๋จธ๋ฌด +๋ฉ˜ํ†  +๊ตญ๋ฌผ +๋…ธ๋ฒจ +๋„๋ฐ• +์ฐพ์•„์˜ค +ํ•œ์ง„ +๊ธ‰๋“ฑ +์ž ์‹ค +๋ฐ”๊ฟ€ +์†์„ฑ +์—…์ž +์ฐธ๋ชจ +##man +๊ฒฐ๋‹จ +๋นˆ๊ณค +๋ชน์‹œ +๊ณ ์ธ +์ธ์น˜ +๋ฐ˜์ „ +์ฝ”๋ฏธ๋”” +์ˆ˜์—ฌ +๋งˆ์Šคํฌ +์ œ๋ฐœ +##ํ† ๋ฆฌ +๊ด‘๋ช… +์ •๋‹ต +##์œ ์‚ฐ +์ˆ˜์‹œ๋กœ +์ˆ˜์‹  +์ค‘๋ฆฝ +ํ”„๋žœ +์ทจ๋ฏธ +##์Šคํ† ๋ž‘ +๋ ˆ์Šคํ† ๋ž‘ +๊ฐ๊ธฐ +๋ฒ„ํ‹ฐ +๊ณ ๋ ค๋Œ€ +140 +์žฅ๋ฏธ +๋‚จ๊ตฌ +๊ธฐ๋‘ฅ +๊ฐœ๊ทธ +##๋ฃจ์Šค +์ง€์—ญ๊ตฌ +์‹œ๋‹ฌ๋ฆฌ +๊ณ ์Šค๋ž€ํžˆ +์‹œ๋ฆฌ์•„ +๋ถˆ์•ˆ๊ฐ +๋ณด๋‚ผ +๋ผ์ดํŠธ +ํ‘œ์ง€ +๋ถˆ์‹  +์ง€๋‚œ์ฃผ +ํŠธ๋ž™ +์†Œ๊ทน +ํ˜ธ๋ž‘์ด +์•ˆํƒ€๊น +๊ธฐ์„ฑ +##๊ณตํ™”๊ตญ +์ˆ˜์Šต +๋ฐ•์˜ +์œ ์ธ +์‹ ํ˜ผ +ํ•™์—… +ํ•ฉ์ž‘ +์„ ์ œ +์•ž์žฅ์„œ +ํƒ€์œจ +์ค‘์žฌ +##๋จธ์Šค +๊ณต์ž +##๋นจ๋ฆฌ +์Šน์šฉ์ฐจ +์„ ์กฐ +ํ˜ธ๋ฅด +์žฅ์ธ +๊ฐ€์ ธ๋‹ค +ํ˜ˆ์•• +์นจ๋žต +๊ทœ๋ฒ” +๊ด€ํ•ด +์šธ๋ฆฌ +๊ฑด๋„ค +ํ†ต๋กœ +๋ฌธ๋“ +๋˜์กŒ +๋Œ€๋žต +ํ™œ๋ ฅ +ํ•œ๊ฒฐ +์‹œ๋ฆฝ +##ss +Mar +์šด๋™์žฅ +ํŒŒ๋ผ +ํ† ํฌ +์ด๋ค„์ง„ +์ „์•ก +๋ฌผ๋ ค +##av +๋งˆ๊ตฌ +์ธ๋ฏผ๊ณตํ™”๊ตญ +์•„ํ™‰ +๋ฌด๊ฑฐ์šด +์ง‘์ฐฉ +๋ฐ€๋ฆฌ +๋Ÿฌ๋ธŒ +##์™ธ์„  +์˜ค์ŠคํŠธ๋ฆฌ์•„ +์–ด์ง€ +์‚ฌ๋ช… +๋“ฑ๊ธฐ +๋ฐํ˜€์กŒ +๊ฐœ๊ด€ +1983 +๋ฆฌ์Šค +์•„๋‹Œ๋ฐ +##โ”โ” +ํ”ผํ•  +ํŒŒ๋ž€ +##ov +ํ‹€๋ฆผ์—† +##๋ฌด์„ฑ +๋‚ด์„ธ์šฐ +๋ชจ๋“œ +์„ฑ์‚ฌ +์„œ๋ฐฉ +##์Ÿ์ด +ํด๋ฆฌ +๊ฐ„์ ˆ +##ee +ํ‘์ž +๋ฆฌ๋ชจ +๋‹ฌ๋ฆฐ +์šฉ๋Ÿ‰ +##ly +์ƒ์‚ฐ๋Ÿ‰ +๊ธฐ์˜ +๋Š๋ƒ๋Š” +##์ง€๋‹ˆ์–ด +์ฃผ์–ด์ง„ +์ƒ๊ถŒ +๊ฒฌ๋”” +์ฆ์ • +๊ฐ์ง€ +๋งค์›” +์ถ”์‚ฐ +๋†๋‹ด +์•…๋งˆ +์ด๋ถ„ +์—…์  +์†์ž +##์˜ฌ๋กœ๊ธฐ +๋„คํ‹ฐ +์˜จ๋‹ค +์‹œ์ฒด +๊ฐ€๋ผ์•‰ +๋‹ฌ์ฝค +##์ง€๋Šฅ +๊ถŒ์œ  +์ค„์ผ +์กฐ๊ทธ +๋ฐ˜๋ ค +์•ผ์ƒ +๊ฑฐ๋‘” +์†Œ์œ ์ž +์šฐ์—ฐ +๋งˆ์ดํด +๋ธŒ๋กœ +์–‘์ธก +##์ €์Šค +์ถฉ์„ฑ +๋ณต๋ฌด +์ธ๊ณต์ง€๋Šฅ +์ค‘์‹ฌ์ง€ +๋‹น์ฒจ +๋…ธ์ด +๊ธฐ๋‹ค๋ ค +์ธ์„ฑ +๊ฐ์‹ธ +##์ฃผ์–ผ +ํ„ฐ์ ธ +##์…‰ํŠธ +๊ทธ๋งŒ๋‘ +๋ณธ๋Šฅ +๊ณต์ง์ž +์ œ์˜ +๋Œ€์ ‘ +ํ™”๊ฐ€ +๊ตฌ์‚ฌ +SM +๋ณต๊ตฌ +์ˆ˜๋…„ +๊ตฐ๋ถ€ +##ry +๋ฐ€์ ‘ +๋™์•„์‹œ์•„ +ํ”ผ์šฐ +๋‹จํ˜ธ +##af +๊น€์ผ์„ฑ +ํŠน๋ณ„๋ฒ• +์›ํ˜• +์ด๋ฐ์˜ฌ๋กœ๊ธฐ +๋ฏผ์˜ +์ถ”์ƒ +๋ฌผ๋Ÿฌ๋‚˜ +์•„๊ฐ€์”จ +์ฒด์ค‘ +์ถœํ˜„ +##์Šฌ๋Ÿฌ +ํŒŒ๋„ +ํฌ๋ฅดํˆฌ +##ight +์ •๋ฌด +๊น€๋ฏผ +๋ฐฐ์›Œ +๊ธ€์Ž„ +์ˆ˜ํ™• +์—ฐ๋‹ค +์ˆ˜์œ„ +๋„์ง€์‚ฌ +##DI +์–ด๋Š์ƒˆ +์• ๊ตญ +๋™ํ–‰ +๋ชฉ์š• +๋งˆ๋ฅดํฌ์Šค +##๋“ค์—ฌ +์˜€์„ +ํ†ตํ–‰ +์ด์ง€ +์‚ฌ๋ง‰ +์‹ ์ถ• +๋‚ด๋น„ +๊น€์ง€ +์—๋“œ +์ž๋งค +๋ฌด๋” +1979 +๊ทธ๋‹ค์ง€ +ํ•ด๋ผ +๋Œ์•„๊ฐ” +์‚ด๋ฆผ +๋ฌด๋„ˆ์ง€ +ํŒจ๋„ +##๋กœ๋‚˜ +์ผ์„  +์šฐ๋ฆฐ +๋„คํ‹ฐ์ฆŒ +๋‚ด์žฅ 
+๋ชจํ—˜ +AP +##ant +์ถ”๊ฒฉ +๋„๋‘‘ +์ ‘๋ชฉ +๊น€๊ด‘ +์ž์˜ +๋ฌธ๊ฑด +์‹๊ตฌ +์ด์‚ฌํšŒ +ํ•ด์ ธ +์‹œ๋„ˆ์ง€ +๋ผ๊ธฐ +๋‚ ์งœ +์œ„์ƒ +์ฒญ์ถ˜ +๋‚ด๋†จ +์˜๋ฆฌ +์ธํ„ด +BM +๋งž๋Œ€ +์ƒํ•œ +์ •ํ™ฉ +ํž๋ง +์•ˆ์„ฑ +์€์ง€ +์ด‰์ด‰ +๋ฉดํ—ˆ +##40 +์ฃผ์ „ +ํ—Œ์‹  +ํšจ๋ ฅ +์ƒ์„  +๋ชจ๊ธˆ +์ „๋„ +์†Œํ™€ +๊ฑฐ์„ธ +์ฑ…์ž„์ง€ +๋ฐ”์œ +on +##FC +ํ—ฌ์Šค +๊ด‘๋ณต +์Šคํ”ผ๋“œ +๊ธฐ์–ด +##end +๋˜ํ’€์ด +ํ•˜๋„ +๊ด€๋ฆฌ์ž +์˜ต์…˜ +##์˜ค์Šค +์ž๊ฒฉ์ฆ +๋ณ„๋ช… +์ง‘ํ•ฉ +์ฐฉ๊ฐ +๋ฐ€์–‘ +๊ตญ๋น„ +๋ฒค์น˜ +##๊ธ‰์‹ +์•ฝ์ž +##ํƒ€์šด +๋ณด๋ณต +๋งŒ์„ฑ +๊ธˆ๊ฐ์› +๋™์ง€ +ํ›„์† +##๋‹ˆ์ŠคํŠธ +์–ด๋–จ๊นŒ +์น˜์•„ +๊ฐ€์ค‘ +๋ณด๋ฆฌ +๋ฌด์ƒ๊ธ‰์‹ +๋ฐ˜์ฐฌ +ํ—ค๋“œ +ํ•„๋ฆ„ +๊ด‘๋ฒ”์œ„ +๊ด‘์–‘ +๊ฐ€์ฃฝ +์†Œ๋“์„ธ +ST +##ack +ํ”Œ๋ ˆ์ด์˜คํ”„ +๊ธฐ๋ป +Ar +๋ง๋ ˆ์ด +##๊ด‘์—ญ +ํ† ๋ก +๊ฒŒ์žฌ +์˜ฎ๊ฒผ +๊ฒฝ์ฐฐ๊ด€ +์ฐพ์•„๋ณผ +์˜ฌ๋ฆด +1945 +๋ฉ”๋ฅด์Šค +ํƒ„ํƒ„ +๊ณ ์ข… +ํ•ด์‚ฐ +์ƒ๋Ÿฌ๋“œ +์‹ ์ข… +์œผ์‹  +์žฌํŒ์†Œ +์ขŒ์„ +##๊ตฌ๋‹ˆ +ํ•™๋…„๋„ +์•„์ด์Šค +๊ฐ€๋งน์  +์‹ ํƒ +์ง„์ „ +๋ถ€์ถ” +๋”ฐ๋ผ๊ฐ€ +๋ฒ ๋ฅผ๋ฆฐ +์˜์ฃผ +๋งˆ๋น„ +์ˆ™์†Œ +##ith +์›Œํ„ฐ +ํ–‰์„ฑ +์—ํ”ผ +##๋ฐ€๋ฆฌ +์นด๋ฅด +๊ถค๋„ +์žฅ๊ต +์‹œ์ƒ์‹ +๊ฒฝ์‚ฌ +๊ต์—ญ +์ง€๋ฅด +##oll +๋ถˆ๊ฝƒ +##ast +๋ฉด๋‹ด +๊ณ„์ • +์‚ฌ๋„ +๋ฏธ๋“œ +๊ตฐ๋ฐ +##ence +๋™๋ถ์•„ +ํŒŒ๊ณ  +์—ด๋ ค +๋…ธ๋™๋ถ€ +๊ธ€์”จ +์ƒ์› +##๋งˆํ†  +ํƒ€์ž„์Šค +MO +##์‹๊ฐ„ +๋ฐ”์˜ +์ง€ํ˜• +์ ‘์–ด๋“ค +##๊ทธ๋ ˆ์ด๋“œ +์ž”๋œฉ +์—…๊ทธ๋ ˆ์ด๋“œ +##๊ฑธ์ด +##ass +๊ฐ•ํ•ด +๋ฒ ์ด๋น„ +์œ ๊ต +ํƒ‘์Šน +๊ฐ€๊นŒ์›Œ +๋ฌผ์Œ +์ž ์ˆ˜ +์ตœ๊ฐ• +์†Œ์ง‘ +๊ตฌ์ฒญ์žฅ +๊ณจ๋“  +๋ฏธ์šฉ +์™€์ค‘ +๋ฒ„๋ฆฐ๋‹ค +์ˆ˜์ƒ์ž +ํ•ญ์†Œ +##per +๊ด€๊ด‘์ง€ +##๊ณ ์† +์ ˆ์•ฝ +1981 +๊ตฌ๋ฆฌ +๋ณด์Šค +์‚ฌํ˜ +##agram +์˜ค์‚ฌ์นด +์„œ๋ฒ„ +##ung +์Šน๊ฐ•์žฅ +##๋‘๋ฅด +์ปค์กŒ +์„ฑํ˜• +์ฒด์งˆ +์‚ญ๊ฐ +##๊ฑฐ๋‚˜ +##ํฌ๋ผ +ํ•‘ํฌ +๋‚ด๋ฆด +##๋ž๋‹ˆ๋‹ค +๋ฐฑ์•… +๊ฐ๊ธฐ +๋ฐฐํฌ +ํŽธ๊ฒฌ +๋†’์ผ +๋ณด๊ฐ• +##โ €โ €โ €โ € +##๋ธŒ๋ฆฌ๋“œ +๋ช…๋ชฉ +๊ฒฝ์˜์ง„ +์•ŒํŒŒ +์•„๋ฅดํ—จ +##๋ฆญ์Šค +๊ธ‰๊ฒฉํžˆ +์Šค์ฟจ +ํ•˜์ด๋ธŒ๋ฆฌ๋“œ +๋ฐ”๊พผ +##olog +๋งž์ถ˜ +๋™์•„์ผ๋ณด +์—ฌํŒŒ +์ „๋ฝ +๊ฐ€๋ฆฌํ‚จ๋‹ค +06 +๊ฐ€์ ธ์˜ค +๊ณ ์ณ +์ด๋“ +์ˆ˜๋ จ +๋ชจ์ธ +์–ด๋”˜ +์ถ•์‚ฐ +์•Œ๋ ‰์‚ฐ +๋งˆ๋ฆฌ์•„ +์•„๋ฅดํ—จํ‹ฐ๋‚˜ +##ond +์ œ์ด +๋™๊ตด +๋ Œ์ฆˆ +ํ˜„๋ช… +์œผ์…จ +##๋ฌด์ง€ +๊ฐ์‚ฌ์› +์จ์„œ +๋ฉ”๋‹ฌ +์–‘๋ฐ˜ +๋ฐฑ์ธ +##๋ฒ ๋ฅด +์ฒ˜์žฅ +์ถœ๊ตฌ +์ •์กฐ +ํ˜ผ์ธ +##๋ฐ”๋ผ +์„ ๋ช… +์ง€์ƒํŒŒ +์˜ˆ์‚ฐ์•ˆ +##OS +๋ฐ˜ํ•ด +์œ ๋ฐฉ +์•„๋ฌดํŠผ +GDP +๋‹ค๊ฐ€๊ฐ€ +##ํ”„๋ฆฌ์นด +์ •์œ  +๋ ˆ์ด์Šค +๋ณด์‚ด +์ˆ˜๊ฐ• +๋ฏธ์Šค +์ข…์—… +์นจ์ž… +๋ถ€๋ฅผ +์ฐฝ๋‹จ +๋ฌธํ—Œ +##์ฐจ๋ก€ +##oh +##ear +์„ํƒ„ +์ค‘์ง€ +ํ–‰ํ•˜ +์—ฌ๊ณ  +๋‹ด๋ก  +์ถ”์ฒจ +์‹ ๋™ +๋จน๋ฐฉ +์ฒœ์ฃผ +๊ฐ•๋‚จ๊ตฌ +์ •๊ตญ +๋‚™์ฐฐ +๊ตฌ์žฅ +์ˆœ์‹๊ฐ„ +๋ฏธ์ง€ +์—‡๊ฐˆ +ํ’์„ฑ +์‹œ๋น„ +์ด์Šน๋งŒ +ํ”Œ๋ผ์Šค +์Šน์  +๋”์ฐ +๋Š๋ƒ๊ณ  +์ธ์กฐ +ํšŸ์ˆ˜ +๊ตํ›ˆ +๋‚ญ๋งŒ +๋ฌผ์–ด +์ด์–ด์ง„ +์ž์ด +์ž„์ฐจ +์ „๊ธฐ์ฐจ +๋ฉ”๋ฆฌ +๋ฐ˜ํ™˜ +์ „์†ก +##ice +##์ด๋„ +##IC +##ust +์ด๋Œ€ +๊ธฐ์ฆ +๊พธ๋ฉฐ +๋Œ€์น˜ +๋ฐœ์Œ +๋Œ€ํ•œํ•ญ๊ณต +ํ•˜๋‚˜ํ•˜๋‚˜ +##ํŒŒํฌ +์•ฝ์  +๊ณ ๊ฐ€ +ํŠนํ˜œ +์ „์ด +##๋ฆ…๋‹ˆ๋‹ค +์ž„์˜ +๋ ˆ์ € +์ผ๋ จ +์˜ˆ์–ธ +##tive +##ory +๋น„๋‹จ +๊ณต์˜ +๋ธŒ๋ผ์šด +์˜ˆ๋ฐฐ +์‚ผ๊ฐ +##๋ณด๋“œ +์‚ฌ๊ฐ +##์ €๋ฆฌ +๋ €๋‹ค +์ง„๋™ +๋ฒ„ํŠผ +์ผ์œผ์ผœ +์ˆ˜ํ˜ธ +์•„ํ‹ฐ +๋ฐœ๊ธธ +์žฌ๋ฐœ +๊ณ ์†๋„๋กœ +์‚ฌ๋ฃŒ +์ข…์—…์› +์ง€์›๊ธˆ +๋ฐฐ์šฐ์ž +๋ชป์ง€์•Š +##ํ‚ค์Šคํƒ„ +##๊ณตํ•ญ +์ง„์•• +์—ด์‡  +##์กฐ๋ฆฌ +๋Œ์–ด๋“ค +๊ฐ๋ฉด +๋“œ๋ผ์ด +๋‹น๊ธฐ +๋ฐฐ๋ถ„ +๋ฉ”์ผ +๊ฐˆ์•„ +ํ•ด๋ณ€ +ํด๋ž˜์Šค +์œ ๋… +๋ฐฐ์šธ +์š”์–‘ +์˜์•ฝํ’ˆ +์ผ์‚ฐ +ํ•ด๊ฒฐ์ฑ… +์“ด๋‹ค +185 +์„œ์  +๋ฒ•์กฐ +ํ•˜์ˆ˜ +๋Š๊ปด์กŒ +๋“œ๋ฆฝ๋‹ˆ๋‹ค +์•ˆ๋ฐฉ +์‚ฌ์ด์ฆˆ +๋ถ€์‹œ +๊ฐ€๋ฅด์นจ +1930 +ํ•ด๋ฒ• +์†Œ์„ค๊ฐ€ +ํฌ์ƒ์ž +ํ•ญ๊ตฌ +์žฅ๋ฒฝ +ch +๊ฒฝ์ „ +์‚ฌ๋ผ์ ธ +์กฐ์นด +๊ณต๋ฐฑ +ใ…Žใ…Žใ…Ž +์—ดํ’ +์‹œ์นด +๊ด€์‹ฌ์‚ฌ +๋‹น์„ ์ž +๊ธฐํ•˜ +์ค‘๋…„ +##๊ด‘์—ญ์‹œ +ํ•ด์šด๋Œ€ +##๋ณ‘๋Œ€ +์–ธ๋ก ์‚ฌ 
+์ง€๊ทน +ํ”ผ๊ณ ์ธ +๊ด€ํ•˜ +๊ด‘ํ™”๋ฌธ +๋ ˆ์ธ +์†Œ๋ง +##PS +๋‚˜๋ˆˆ +ํ˜ธ๋ฅด๋ชฌ +๋ฒŒ์ธ +์น˜์šฐ +04 +์˜๊ตฌ +์ฃผ์—ญ +๋น„ํ‹€ +๊ฐˆ๋น„ +๋‚˜๋น  +์Ÿ์•„์ง€ +๊ธฐ์ˆ™ +์—์–ด์ปจ +๋‹น๋Œ€ +์ข…์‚ฌ +์ƒํ’ˆ๊ถŒ +๋ถ„๋Ÿ‰ +ํ‰๋ก ๊ฐ€ +๋‹จ๋ง +๋ฒ•๋ น +์•ผ๊ธฐ +๊ฐ„๊ฒฉ +์‹ฌ๋ฆฌํ•™ +์˜ฌ๋ผ์˜ค +##๋ฐ€ํžˆ +์ฒญ์› +MC +์Šค๋Ÿฌ์›  +์šธ์Œ +์Šคํ‚จ +ํƒ€ํ˜‘ +์„œ์‚ฌ +๊ทธ๋Ÿฐ์ง€ +์ด๋ฉ” +๊ต์–‘ +๋œจ๊ฒ +๋ง‰๋‚ด +์„ธ๋ฅด +๋‹จ์ • +๋ณต์šฉ +๋ชจ์Šคํฌ +##์šด์„œ +101 +๋ฏผ์ • +ํ†ต์‚ฐ +์™ธ๊ต๋ถ€ +๋ถ€์ฐฉ +##๋ฐ์Šค +์™ ์ง€ +์ž‘๊ณก๊ฐ€ +##ord +์น˜๋ช… +ํ™œ๊ธฐ +์ •ํ™” +##์„ผํ„ฐ +์•„๋‚˜์šด์„œ +๋ฃจ์ด +LA +ํ™”์‚ด +##ational +์ž„์ง„ +๊น€๋ณ‘ +๋นจ๋ผ +์ด์–ด์ ธ +ํฌ๋กœ +์•„์‚ฌ +๋ฐฐ์†ก +์ฒ ํ•™์ž +์ด์–ด์ง„๋‹ค +์‘๋‹ต์ž +๋„์‹œ๋ฝ +์žฅ๋ ค +๊ฑฐํ’ˆ +์•ผ์ฑ„ +๋„ˆ๋จธ +์ผ์ผ +์‡„์‹  +๋ถ„์ž +์‚ฌ๋ฐฉ +SU +ํ๊ธฐ๋ฌผ +์…”์•ผ +์žฌ๊ณ„ +##์˜์‹ +์•ˆ์ฃผ +์‚ฌ์ œ +ํ’€๋ฆฌ +๋ถ„๋‹ด +##ํƒ€๋ฅด +๋ง‰์ƒ +๊ธฐ๋ฅด +๋ฌผ์ฒด +๊ฐ€๋ฃจ +์ง€์‹์ธ +์™ธ๊ณฝ +์†Œ๋ชจ +๊ฒฝ์œ„ +์„ผ์„œ +์ „์ž„ +ํŒŒ์ฃผ +ํšŒํ”ผ +๋ฐœํƒ +##์ €์šฐ +##๋‘๋ฆฌ +๋ฐ”๋ž๋‹ˆ๋‹ค +##๊ตญ์ˆ˜ +119 +์‚ฌ๋ชจ +๋†๋„ +๋น„์ถ” +๋‹จ์ ˆ +๋ฐฐํ›„ +๋งˆ์ธ +ํ•จ์ˆ˜ +์ˆ˜์ƒ‰ +ํ•˜ํ•˜ +์†Œ๋‚˜๋ฌด +##๋ฆฌ์ผ€์ด์…˜ +ํ˜‘์—… +ํฌ๋ฅดํˆฌ๊ฐˆ +๋ณด์„ +์ด์› +๋‚˜ํƒ€๋‚  +๋“œ๋ ˆ์Šค +๊ณผ์ž +##๋กœ์šฐ +๋ชจ์˜ +์–ด๋ ธ +๋“ฑ๋“ฑ +์ค€๋‹ค๋Š” +##์„ธ์ด +์ฒ˜๋…€ +##๋ชจํ†  +ํ„ฐ์น˜ +์‚ฌ๋ผ์ง„ +๊ฐ•๋™ +##๋ชฌ๋“œ +์ €์†Œ๋“์ธต +๊ณต์กด +๋–จ์–ด์ง„๋‹ค +๋ฐฐ๊ตฌ +๊ด‘์‚ฐ +๋ฐฐ์‹  +ํ•œ์ „ +์•„ํ•˜ +๋‘๋ ค์›Œ +์ขŒํŒŒ +170 +์ธ์ž +##ER +์ผ์ˆ˜๋ก +์ƒ์ • +ํฌ๋ฆฌ +์œ ์„ฑ +์ˆ˜์งˆ +๊ธˆ์š”์ผ +##ld +์ •ํ•ด์ง„ +##๋„๋“œ +์ „๋ฐฉ +##๋„ค์Šค +๊ตญ์„ธ์ฒญ +์ง€์˜ฅ +๋‚ด๊ฑธ +์†ํ•œ +์ƒ์šฉ +๋ฆฌ๋“ฌ +์—”์ง€๋‹ˆ์–ด +ํ•œ๋ฐฉ +๊ฒฐ์Šน์ „ +##์‚ฌ์—… +์Šค๋ฆฌ +์—ฐ์ด +๋“ํ‘œ +์•„์คŒ +๋Œ€์ฃผ์ฃผ +์—ด์•… +Pro +์—ฐ์•ˆ +๊ฐ„์ง +๊ป์งˆ +์„ ๋น„ +์ผ์œผํ‚จ +ํƒ„์•• +์ฒœ์‚ฌ +๊ตฌํ•ด +์ž…์  +##ook +์ˆ˜์žฅ +##ํ–‰์œ„ +๋ถˆ๋ฆฐ๋‹ค +์˜ค์ด +๋‚ด๋‹ค +๊ฒ€์ง„ +๊ณ ์น˜ +์š”๋ฒ• +์ฒœ์žฅ +##PGA +ํ™์ค€ +ํ–‰์šด +๋ฏผ์† +๊ณ ์„ฑ +์œˆ๋„ +๋‚ด๋ ค๋†“ +1973 +์ŠคํŽ™ +์ž์˜์—… +์•„์คŒ๋งˆ +๊ตฌ์ ˆ +ํ‹ฐ๋น„ +์นจ๋ชฐ +##์นด๋ฝ +##oun +##ํ‹ฐ์•„ +๋จธ๋ฆฌ์นด๋ฝ +์• ์ธ +๊ฐœ์ฒด +์ œ์ž‘์ง„ +์‹ ์ƒ +##์„œ๋ฆฌ +์Šค์นด์ด +๋จน๊ฑฐ๋ฆฌ +๋‹ค๋ฉด์„œ +๋กœ์ผ“ +์–ด์Œ +ํ†ต์‹ ์‚ฌ +๋ฐœ์˜ +Sh +##๋ ˆ์ดํฌ +์ด๋ฉ”์ผ +๋–จ์–ด์งˆ +์ฒ™์ถ” +ํ•ฉ์ฐฝ +์˜ค๋ฝ +๋Œ€์„ธ +์œ ๊ฐ +๋“œ๋Ÿฌ๋‚œ +##ํฌ์Šค +##์“ฐ๊ธฐ +์ฐจ๋ ค +๋ง์„ค +์œ ์Šน +๋‚˜๋…ธ +##age +ํƒˆ๋‹น +๊ณ ๊ณ  +๋ณด์ฆ๊ธˆ +๊ฐ„์„ญ +๊ฐํžˆ +๋‚˜๋ˆŒ +์š”์‹œ +1978 +๊ฑฐ์‹ค +๋น„์ž +์ƒ์ƒ๋ ฅ +ํŒŒ์žฅ +๋ง๋ ˆ์ด์‹œ์•„ +##์ง€๋ถ€ +๋งค๊ฐœ +๋†์–ด +์ŠคํŽ˜์…œ +์‚ฌ์–‘ +์ธ๊ณผ +๋Œ๋ณด +์“ฐ์—ฌ +๋ถ€์œ  +์ผ์ผ์ด +ํฌ์ง€์…˜ +๊ตํ†ต๋ถ€ +๋‹น์„ ์ธ +LC +๋ฉ”๋ชจ๋ฆฌ +##๋‚ด๋ฆฌ +์ผ์ œํžˆ +์—ฐ๊ณ  +์‹œ์–ด +ํก์—ฐ +์‚ฌ์†Œ +##๋”์Šค +๊ทธ๋ ˆ์ด +์žฌํ•ด +๋“ค๋ ธ +๋Œ€์‚ฌ๊ด€ +๋„‰๋„‰ +์•ˆ์‚ฐ +ํœด์–‘ +์œ„ํ•ด์„  +๋งค์šด +์•Œ์ฝ” +๊ฑธ๊นŒ +์ถœ๋™ +์ด์–ด์„œ +์ตํžˆ +์›”์„ธ +๊ทน์‹ฌ +์ดˆ๋“ฑํ•™์ƒ +๋„์ถœ +๋‹จ์‹ +๋„์šฐ +์‹ค์€ +๊ฐ„ํ˜ธ์‚ฌ +๋ฒŒ์—ฌ +๋ช…์†Œ +์ค‘์‚ฐ +์ž์œ ์ฃผ์˜ +๊ทธ๋ฃจ +์Œ“์—ฌ +๋˜๋Œ์•„ +์œ ๋ฃŒ +ํ–ฅ์ˆ˜ +๋ธ”๋ผ +##๋ฉ˜ํŠธ +๊ฑฐ์น  +์˜คํ”„๋ผ์ธ +Sp +ํ™์ค€ํ‘œ +๋ถ€์ž„ +๊ฐ€๋ น +ํšŒํ™” +๋ฐฑ์‹  +์‚ฐ์ • +๊ทธ๋žœ๋“œ +์ œ๋ฒ• +์ถ”์œ„ +์—„์ฒญ๋‚˜ +์˜ค๋ฒ„ +KBO +LTE +๊ธฐ์—…์ธ +์•ˆ๊ฒจ +๋ฐฑ์•…๊ด€ +์ „์ฐจ +##๋‹ค์ž„ +##์†Œ๋“œ +con +ํƒ๋ฐฐ +ํŒจ๋Ÿฌ๋‹ค์ž„ +ํšก๋ น +์ˆ˜์ง +์›ํ™” +ํ•œ๋ˆˆ +์„ ๋ณด์ธ +์—ฐ๊ตฌ์ž +๋ฐ•์ฐจ +์‚ฌ๋ง์ž +๋…๋ฆฝ์šด๋™ +๋ง‰ํŒ +์œผ๋ ค๋ฉด +##70 +์ง€๋ฃจ +์„œ์› +๋ณ‘์—ญ +ํ”Œ๋ ˆ์ด์–ด +๊ทธ๊นŒ +##ect +๋ฐœ์ƒ +##์‹ ๋ฌธ +๋ฐ•์› +ํ†กํ†ก +๋‚œ๋ฏผ +ํผ์ ธ +๊ธฐ์„ธ +์›”์š”์ผ +์กฐ์กฐ +๊ฐ•๋ณ€ +##ens +๊ท€์‹  +์กฐ์œจ +์ฆ์—ฌ +์–ด๋ฆฌ์„ +ํ–ฅํ–ˆ +##aily +๊ฐ–์ท„ +##ํ…Œ์ด๋„ˆ +์œ ์„ธ +๊ฐœ์กฐ +์š”ํ•œ +์‹ ๋‚˜ +์˜จ์‹ค +์•”์‹œ +ํŒŒ๊ฒฉ +๊ณต์กฐ +ํ•ฉ๋ฒ• +์ฐจ๋ถ„ +์ง€๋‚˜์นœ +์˜ค์ผ€์ŠคํŠธ๋ผ +ํŠผํŠผ +DM +๋“ค์–ด์˜ฌ +1920 +์ƒ์ˆ˜ +์ผ๊ฐ€ +##๋Ÿฌ์กŒ +๋“ค์–ด๊ฐ„๋‹ค +##๊ฐœ๋ฐœ 
+์ผ๋ณธ๊ตฐ +์„œ์ • +๋งคํŠธ +ํ‘ธ๋ฅด +๋˜‘๋˜‘ +์ด๋ผ๋„ +##๋ ‰์Šค +์•„๋‹˜ +ํš๊ธฐ +##60 +๊ต์žฌ +##์Šค์นด +์ฑ…์ • +๋ฐฉ๊ธˆ +์›์†Œ +์ƒ๊ธด๋‹ค +๋ถ€์นœ +##๋ฐ”์ดํŠธ +๋ฐฉ๋ฉด +๊ดด๋ฌผ +์œผ๋ก  +##๋กœ์›€ +์•„์•„ +๋””์Šคํฌ +์ด์ชฝ +์›๋Œ€ +๊ฐ‘์ž‘์Šค +์šฐ๋Œ€ +##์ฒ ๋„ +๊ด‘์—ญ์‹œ +๋‚จ์šฉ +๋ถ„๋‹จ +KTX +##์šฐ๋ฆฌ +ํฌ๋กœ์Šค +์ค‘๋ณต +์ฐพ์•„์™” +๋ฐ˜์ง€ +์ฃผ๊ถŒ +##๊ตํ†ต +##๊ทธ๋ผ +##๊ฒฝ๊ธฐ +MS +๋“ ๋“  +์ถœ๋ ฅ +##ํ‚ค์•„ +##๋จผํŠธ +์ค‘์–ผ +๊ฒ€์ • +์ปค๋ฒ„ +๊ธˆํ’ˆ +##ey +๊ณต์—ฐ์žฅ +์•„๋‹Œ์ง€ +์‹ ์ž +์ด์ธ +์žฅ๊ธฐ๊ฐ„ +์‚ฐ๋‹ค +ํ”„๋žœ์ฐจ +๊ตญ์•… +์—‘์Šคํฌ +์—์ด์Šค +ํญ๋„“ +##๋งค๊น€ +๋•Œ๋ฆฌ +##์•„์›ƒ +##AS +์„คํƒ• +์ถœ์ œ +1972 +์ž๋ฆฌ๋งค๊น€ +๋Š๋‚€๋‹ค +ํ™์ˆ˜ +##ํ…Œ๋ฅด +์ด๋ค„์งˆ +ํ•˜๋ฒ„ +๋ณต์ œ +##ํŠธ๋กค +์•”์‚ด +์ž์งˆ +1975 +##๋ฒ ์ดํ„ฐ +๋ฐ€๋„ +์‚ฌ๋งˆ +์ž๋ฆฝ +1974 +๊นŒ๋‹ค +๋ถ„ํ•ด +๊ฒฝ์œ  +ํ”„๋žœ์ฐจ์ด์ฆˆ +184 +์ด๋ฃฐ +์ธ๋ฌธํ•™ +์šฐํŽธ +##๋ถ€๋กœ +##๋ด์•ผ +ํ”Œ๋ผ์Šคํ‹ฑ +๊ฐ„์‚ฌ +์ง‘๊ฐ’ +์Šคํ‹ธ +๋‹ค์ฑ„ +๋กœ๋ฒ„ํŠธ +##ํ…Œ์Šค +๋Œ€์ถฉ +๋ฐ›์•„๋“ค์ผ +์น˜๋ฐ€ +์ž”์น˜ +์‹ค์ „ +์œ ๋น„ +##mer +๋ณดํ–‰ +์œ ์ง„ +์ •๋ฆฝ +##๋Ÿฌ์›€ +##์—์ด +์„ ๊ฑฐ๋ฒ• +##ef +๊ทธ๋Š˜ +์‚ฌ๋ฉด +##๋ฌด๊ธฐ +ํ–‡์‚ด +์•Œ์ฝ”์˜ฌ +๋ถ€ํ•ฉ +์—์ฝ” +์‚ฐ์ถœ +##stagram +##์—๋ฅด +##iet +ํ‚ฌ๋กœ +ํ•˜์› +๋‹ฌ๋ž˜ +##ib +์‹ ์•ฝ +์ง์„  +์ƒํ•ด +์‚ฌ๋ น๋ถ€ +๋˜์‚ด +ํŒŒ์‚ฐ +์ดˆ์ฝœ +์—ฐ๋„ +๊ฐ€๋งˆ +๊ธฐํ•œ +๋ฉธ๋ง +์งœ์ฆ +์ž๋ถ€์‹ฌ +์ˆ˜๊ณ  +ํ—ค์ด +์Šน๊ฒฉ +ํ•™์‚ด +๋†“์ธ +์ฝ˜์…‰ํŠธ +์น˜์†Ÿ +๋ฌผ๊ฒฐ +II +1976 +๋ฐ•์ฃผ +๋‚˜์น˜ +ํ† ๋กœ +ํ•„๋“œ +๊ด‘๋ถ€ +ํšŒ๋™ +๊ณก์„  +๋Œ์•„๋‹ค๋‹ˆ +๋ณดํ—˜๊ธˆ +๋†€๋ž€ +์˜ฎ๊ธด +์šฐ์„ธ +๋ถ์ธก +๋‹ตํ–ˆ +##์—…์ž +๋ฏธ๋ถ„ +์•ผ๋งˆ +๊ฐœ๊ฐœ +์ „์ง„ +๋งค์ง„ +ํ•ด์ž„ +ํ˜‘๋™์กฐํ•ฉ +์™ธ์ณค +๋ณธ๊ด€ +์‚ฐ๋‹จ +ํ‰์ผ +์€ํ•˜ +##์ˆญ์ด +๋ผํ‹ด +์• ์š” +๋‹ค๋‹ +์ ๋Œ€ +๋ˆ„๋ฆด +ํฅ์› +๋‹ฌ๋ ธ +์ง„๋„ +์†กํŒŒ +์‹ ํฅ +##์‚ฌ์Šค +์•„๋ฏธ +์ผ€์ดํฌ +๋…ธ์Šค +์Šค๋ฏธ +๋‚˜ํด +์‹ ํ•˜ +์ „๊ฒฉ +๊ณต๋กœ +๋ด๋งˆํฌ +๋งˆ๋ฃจ +๋‚ด์‹  +์žกํžˆ +##์ˆ˜์ˆ˜์ƒ‰ +๋ณธ๋‹ค๋ฉด +ํ‚ค์›Œ๋“œ +OS +์‚ฌ์„ค +์˜จ์ฒœ +##์›์žฅ +์„œ์žฅ +ํƒ์‚ฌ +์••์ˆ˜์ˆ˜์ƒ‰ +์Œ๋ชจ +ํ• ๋ฆฌ +##ํŠœ๋ธŒ +์€ํ˜œ +๋†๋ฆผ +SS +์ ํ˜€ +์ž์ฑ… +๋‚ฉ์น˜ +๊ทนํžˆ +์ œ๋กœ +ํ›—๋‚  +์ฃผ์œ ์†Œ +๋“œ๋ฆฐ๋‹ค +๊ธฐ์šธ์—ฌ +์œ ํŠœ๋ธŒ +##IP +์‚ฌ๊ต์œก +๋ชจ๋ฆฌ +ํ˜๋Ÿฌ๋‚˜ +๋ง๊ธฐ +์ด๋ ฅ +##AT +ํ›„ํ‡ด +์–‘ํŒŒ +์‹คํšจ +๊ฐ€์ ธ๊ฐ€ +์ฐจ๋ฆผ +์ตœ์šฐ์„  +๋ฐฉ์†ก๊ตญ +์™ธ์ถœ +์—ฐ์‚ฐ +๋“ค์œผ๋ฉด +์ธ์‚ผ +al +๋ฌธ์ธ +ํ”ผ์˜์ž +๊ณ ๋Š” +์„ค์‚ฌ +ํ•™๊ณ„ +์ธ๊ฑด +์—ฐ๋™ +์˜๋ฌธ +๊ฐํƒ„ +์ด๋ก€ +์ œ์ฒ  +##์‚ฌ๋ฆฌ +์•ž์„ธ์›Œ +์•Œ๋ ˆ +ํ•™๋Œ€ +์žฌ์ฆˆ +์ข€์ฒ˜๋Ÿผ +๋‹ค๋ฆ„์—† +ํ•˜๊ณ„ +์Šคํ† ์–ด +CCTV +##์ €ํŠธ +์–ด์ƒ‰ +UN +์„ ์› +๋ฐ˜๊ธฐ +๊ตญ์ˆ˜ +ํ†ต์˜ +๊ท€์ฐฎ +์ •์ง +##์˜ต๋‹ˆ๋‹ค +์•ˆ๊ฐœ +ํŒ”๋ฆฌ +์ •์‹œ +๋ฐ˜๊ฐ‘ +##๋ฐ”๋‹ค +ํšŒ์›๊ตญ +##์ž์น˜ +๋™ํฌ +์••์ถ• +์ดˆ๋ก +ํ•ด์น˜ +##ํŠน๋ณ„ +##์…”์ธ  +์ด๋งŒ +์ธ๊ฑด๋น„ +์•ฝํ•ด +BMW +์–ด๊ธ‹ +๋ณด๋žŒ +์ปจํ…Œ์ด๋„ˆ +์˜ฌ๋ผ๊ฐ” +์ฒญํƒ +๊ณ ๋ž˜ +ํผ์ง€ +ํ•„์—ฐ +์„œ์  +ํ—จ๋ฆฌ +์ฐฝ์„ค +์ถฉ๋™ +ํด๋ฆฐํ„ด +์šฐ๋ฌผ +En +ํฌ๋ฃจ์ฆˆ +์ฒญ์ทจ +##qu +๋ˆŒ๋Ÿฌ +ํ”ผํŠธ +๊ธฐ์žฅ +ํ•œ๊ตญ์€ํ–‰ +ใ…œใ…œ +##ism +๊ทธ๋ž˜ํ”ฝ +๋ฐœ์ž +๋ฏธํก +์šฐ์šธ์ฆ +์ผ๊ฐ„ +##eb +๋ง๋ผ +ํ• ์ˆ˜๋ก +๋งก๊ฒจ +์—ฐํ‰๊ท  +๊ฐ„์žฅ +์‹ค๊ฐ +๋‚ด์—ญ +์ฒจ๊ฐ€ +๊ฐ€์น˜๊ด€ +์ƒˆ์‚ผ +์ž ์žฌ๋ ฅ +์˜์ œ +์ฐจ๊ฐ€์šด +์—ด๋ฆด +์ด๋ค„์ง€์ง€ +์‚ฌ์šด๋“œ +์ˆ˜์ฐจ๋ก€ +์‹ ์žฌ +์ƒํ–ฅ +๊ฑฐ์ œ +๋ˆ๋‹ค +์ „์ˆ˜ +๊ด€ํ•ด์„œ +ํ•ฉ๊ณ„ +๊ทธ๋Ÿฐ๊ฐ€ +๋ถ€์ˆ˜ +์—๋‹ค๊ฐ€ +์„์ˆ˜๋ก +##์ž–์•„ +์ˆ˜์›์‹œ +์žกํ˜€ +ICT +์œ ํ•œ +๋‚˜๋น„ +์ฒœํ™ฉ +ํ”ผํ„ฐ +##์œ„์›ํšŒ +๋™ํ˜ธ +๊ธฐ๊ฐ +๋‚š์‹œ +๋””์ €ํŠธ +##ct +ํญ๋กœ +๊ฑธ๋ฆด +์›๊ธˆ +ํ•œ๊ฒจ +โ—‹โ—‹ +๋™๊ฒฝ +๋น„๋ช… +๊ฐ€์‚ฐ +๊ตญ๋ฏผ์€ํ–‰ +๋ฐ”๊ฟจ +์ž…์ž +๊ฐ€๋‹ด +์ •์ž +179 +์ƒ๊ด€์—†์ด +ํ™”๋‘ +์‹ ํ•œ์€ํ–‰ +๊ต๋ถ€ +ํ–‡๋น› +##์น˜๊ธฐ +ํƒˆํ‡ด +##์‹œ์„ค +์‹ ๋„ +์–ด์„  +๊ตฐ๋‹จ +ํŽผ์ณ์ง€ +์—‰๋ฉ์ด +##๋ฅด๋ฅด +๋ฉฐ๋Š๋ฆฌ +์‚ฌ์ง +์นด๋ผ +๋‘”ํ™” +๋ฐฐ์ˆ˜ +๋ ˆ์˜ค 
+์—ํ”ผ์†Œ๋“œ +ํ’์š” +ํŒŒ์ƒ +์„ฑํญ๋ ฅ +์•„์‚ฐ +๋‚จ๋™ +์‹œ์•ผ +์ €๋ ‡๊ฒŒ +##๊ธˆ๋ฆฌ +##์‚ฐ๋ถ€ +์ดํ™” +๋จธ๋ฌผ๋ € +์™ธ์‹ +์˜ค๋งŒ +ํ”„๋ž€ +๋”๋ผ๋ฉด +๋‹ฅํ„ฐ +์ฒœ์ง€ +์˜นํ˜ธ +์ ธ์•ผ +๋ฌธ๋ฒ• +์™•์œ„ +ํ•œ๊ฒจ๋ ˆ +DNA +๋ชจ์”จ +ํ…์‚ฌ์Šค +์ง€์šฐ +์กฐ๋ฅ˜ +##ํ”„๋ฆฌ +์นœ์ • +##์ฝ”๋“œ +์ž…์ฒด +์ž…๊ตญ +๊น€์—ฐ์•„ +๋‹ฌ๋ ค๊ฐ€ +์œ„์ž„ +์ด์ˆœ์‹  +์ง‘ํ•„ +๊น€์„  +๊ฐ€์„ค +ํ•€๋ž€๋“œ +๋ฐœ๊ฑธ์Œ +์ „๋ณต +ํ˜์˜ค +Cl +์ž„๋Œ€๋ฃŒ +์œ ์˜ +์šฐํšŒ +์ถœ๊ตญ +๋‹ฌ๋ผ์กŒ +์ข…์‚ฌ์ž +์ด๋…ธ +ํฅ๊ตญ +๊ธฐ์ƒ +์ƒ์˜ +์™•์‹ค +๋ฐ•์ข… +์„์‚ฌ +์‹ค์Šต +OEC +์•ก์ˆ˜ +๊ฐ•์ • +1977 +ํŒŒ์šด๋“œ +์‹ํƒ +๋ง๋ฆฌ +์ˆจ๊ธฐ +๋’ค๋”ฐ +์›์ˆ˜ +์„ผํŠธ +์ถ”๊ฒฝ +์ด์ˆ˜ +๋ณด๋ฆ„ +์œ ๋จธ +๋ด์„œ +##์—ฐ๊ตฌ +์พŒ์  +์บ์Šค +์ˆ™์ œ +OECD +๋งž๋ฌผ +์œ„๋ฒ• +์„ฑ์ฃผ +์‹ฌ์ƒ +์นœ์ฒ™ +HD +ํœฉ์‹ธ +์—ญ๋™ +##๋จผ์Šค +๋ถ€ํ’€ +##์ง‘๋‹ˆ๋‹ค +##๊ฑธ๋ฆฌ +์ฒœ์ฃผ๊ต +์žฌํ‚ท +๋Œ€๋ฒ•๊ด€ +๊ณตํ†ต์  +๊ธ€์“ฐ๊ธฐ +ํ•จ๋ถ€๋กœ +์šฐ์•„ +1968 +##ex +์ซ“๊ฒจ +๋™๋ฐฉ +##์™€์„œ +๋ฐ€์ง‘ +๊ฐ€์ง„๋‹ค +์ทจํ•ด +ํ”Œ๋žœํŠธ +๊ตญ์ฑ„ +๋‚˜๊ฐ„๋‹ค +์•„ํ…Œ +๊ตฌ์ œ์—ญ +##tern +๋– ๋‚  +##์†Œ์‹œ +์žฅ๋‚œ๊ฐ +์ปจํŠธ๋กค +##๋ฐฉ์šธ +ํ•ด์•ผ์ง€ +์†์† +์—˜๋ฆฌ๋ฒ ์ดํ„ฐ +์†ํ•œ๋‹ค +์•ฝํ•˜ +๋Œ์•„๊ฐˆ +ํ‚ค์ฆˆ +์ฐธ์ „ +ํญํ’ +์—ฐ๋ฃจ +๋ฌด๋Šฌ +์นœ์ผ +##์Šคํฌ๋ฆผ +๋ง‰๊ฑธ๋ฆฌ +๋Œ€์—ฌ +๋งค๋ฒˆ +์šด๋ฐ˜ +##์„ฑ๊ธฐ +๋ฐ”๋”” +์›์‹œ +##๋ฉ˜ํ„ฐ๋ฆฌ +์ „๊ต์กฐ +์กฐํšŒ +##pp +ET +##์ฒœ๊ตฐ +๋งŒ์กฑ๋„ +์•„์ด์Šคํฌ๋ฆผ +๊ฑฐ๊พธ +๋Š”๊ตฌ๋‚˜ +1971 +ํƒ์ƒ‰ +๋””์ ค +๋ฐธ๋ฆฌ +๋ด‰์‚ฌ์ž +์ดˆ์ฝœ๋ฆฟ +์•„ํ‚ค +์–‘์œก +๊น€์žฅ +##ES +##์ž„๊ธˆ +๊ณ ๋ฆฝ +์ˆ˜ํ˜œ +์ž์™ธ์„  +##ํ‰๊ฐ€ +##์‹คํ—˜ +๊ฒฝ๊ณผ +๊ทธํ† ๋ก +๋„ท์งธ +์—˜๋ฆฌํŠธ +๋ชธ๋งค +์ด๊ฒฝ +๋”ธ๊ธฐ +์ƒ๊ธฐ +์นœํ™” +๊ฐ„๋‹จํžˆ +์ˆ˜ํ•„ +์˜ค๋ Œ +์—ฐ๋น„ +์ง€์žฅ +๋†์„ฑ +์ธ๊ฐ€์š” +์—‰๋šฑ +๋Š์ด +์ €์งˆ +๋‹คํ๋ฉ˜ํ„ฐ๋ฆฌ +๋†๋ถ€ +์•Œ๋ ธ +๊น€์Šน +๋ฐ์ด๋น„๋“œ +ํด๋Ÿฌ +๊ณ ๋‚œ +๋‚ ๋ฆฌ +์ด๊ธธ +๋งค๋ฆฝ +an +์„ธ๊ท  +์„ฑ๋ น +๋ณ€ํ•จ +๊ธด์žฅ๊ฐ +์ ์‹œ +์—ผ์ƒ‰ +๊ฐ•์ ๊ธฐ +์—ฌ์ „ +๋ณ€ํ–ˆ +์ง€์ง€์ž +๋ชฐ๋ผ๋„ +์˜จํ†ต +ํœด์ผ +##๊ธฐ์— +ํ•ญํ•ด +์ฒœ๊ตญ +##AR +SP +##์šด๋‹ค +์žฌ์„  +์œผ๋ผ +ํ™๋Œ€ +์ฆ‰์œ„ +๋ฆฌ์„œ +ํŠธ๋ผ +ํ•ญ๋ณต +##์‚ด์ด +์œ ์‹  +##ํด๋ฆฌ์˜ค +๊ธธ๋ž˜ +##๋ฏผ์ฃผ +๋‹ค์‹œํ”ผ +๊ต๋Œ€ +ํฌํŠธํด๋ฆฌ์˜ค +##ary +์‚ฐ์ง€ +๊ต๊ฐ +์‚ฐ์•… +##์ดํŒ… +์œ„๋ ฅ +๊ฑฐ๋ถ +๊ท€์—ฌ์šด +ใ… ใ… ใ…  +ํ•จ๋Œ€ +์ฐพ์•„์˜จ +์„ฌ์„ธ +๋™๊ตญ +๊ณ„์•ฝ์„œ +350 +ํŒŒ๋ฌธ +๊ฒฝํ˜ธ +๋ฐ•์„ฑ +์ œ๋ณด +๋„˜์ณ +์œ ๊ฐ€์กฑ +##ํƒ€์ด +๋ณธ์„ฑ +์ฟผํ„ฐ +##์†Œ์—ฐ +์„ธ์ž +์–ด๋–ก +์‹ค๋ ค +์—ฌํ•™์ƒ +๋น„๋‹ +๋‹นํ•  +๊ฒŒ์ดํŠธ +ํ—Œ์žฌ +๊ฑธ์–ด๊ฐ€ +์—…์†Œ +##ํฌ๊ตฌ +๋ฃจ์ด์Šค +๋ฐฐ์ถ” +ํŠน๊ฐ• +์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ +๋ณ€ํ•ด +๋ถ„๊ณผ +์•„ํ‹ฐ์ŠคํŠธ +๋ฆฌ๋“œ +์ฟ ํฐ +์ดํ•ด๊ด€๊ณ„ +ํŽ˜์–ด +ํ™˜์› +##๋‚œํžˆ +๋ด‰ํˆฌ +๋ณผ๊ฑฐ๋ฆฌ +๋ชจ๋‹ˆํ„ฐ๋ง +๋ฌธํ™”์œ ์‚ฐ +์ฒ˜ํ•œ +๋ผ์Šค +์”จ์•— +๋‹จ๋ง๊ธฐ +์•ค๋“œ +##ํ‹€๋Ÿฌ +์ •์˜๋‹น +##ํฌ๋จผ์Šค +ํ”„๋ ˆ์ž„ +ํ”„๋กœ์„ธ์Šค +๋‹ค์นด +๊ณต๋ฌธ +๊ตญ๊ตฐ +ํผํฌ๋จผ์Šค +๋ถ€๋ฅธ +๋‹ค๊ฐ€์™” +๋‚˜๋งˆ +์ด์˜จ +๋™์ • +์ง€์›์ž +์„น์Šค +๊ทธ์ œ +ํŠน์ง‘ +๋ถ€์—Œ +ํ„ธ์–ด๋†“ +์ด๋ค˜ +์กฐ์† +์šฉ์ด +์ง€๋ฌธ +ํ™˜์Šน +๋ฐฑ์ž‘ +์ ์„ฑ +์—ฌ๋ฆ„์ฒ  +##์†Œ๋ฌธ +##์–ด์š” +๋”ฑ๋”ฑ +์ƒ๋ฌผํ•™ +๊ฑธ๋งž +1948 +ํŽผ์ณ์ง„๋‹ค +114 +์˜ค๋ Œ์ง€ +##์ž์› +##์งˆ๋žœ๋“œ +์†์‰ฝ +์‚ฌ๋‚˜์ด +์–ธ๋ก ์ธ +181 +๋ƒ‰์ • +๋ฐ•์ง€์„ฑ +๋ชจํ˜ธ +์ตœ๊ฒฝ +๋Œ€์ž… +##์š•์žฅ +์•ˆ๋“œ๋กœ +๋ฆฌ์„œ์น˜ +์ด๋ฃจ์–ด์ง„๋‹ค +##๋™๋ ฅ +๊ฒ€๊ฑฐ +๊ด€์Šต +์ƒ๊ฐ๋‚˜ +์Šค๋ฌผ +ํŠน๊ถŒ +์—ฐ์žฌ +์ปค์งˆ +๋ฒผ์Šฌ +๋‹คํˆผ +๋ชจ์Šคํฌ๋ฐ” +๊ณต๊ฒฉ์ˆ˜ +๋‹ค์ง€ +๋„์š”ํƒ€ +์†๋ฐ”๋‹ฅ +##๋ฒ„๋ฆฌ +##ํƒ€๋ฆฌ +์‹œ๋“œ +๊ฐ„์‹  +์ด๋Œ€๋กœ +๋‹คํˆฌ +์šฐํ˜ธ +์‚ฐํ•™ +##ํผ๋“œ +๋น„์Šค +์„ฑํญํ–‰ +ํŒŒ๋™ +๋‰ด์งˆ๋žœ๋“œ +ํ˜„๊ด€ +์‹ ๋ž‘ +๋Œ€์ธ +๋ฐ•์ฐฌ +ํญ์Šค +๊ถŒ์—ญ +๊ฐ€์ ธ์•ผ +์„œ๋ฉด +๋†€๋ž +๊ฑฐ๊พธ๋กœ +์ œ์น˜ +์ดˆ์ƒ +๋ฐฐ๊ณ  +์•„๋žซ +์ž ์ • +๊ตฌ์›Œ +๋ช…ํ™•ํžˆ +๋„˜์–ด์„ฐ +๊ฐ€์ถ• +๋งˆ์Œ๋Œ€๋กœ +์ƒ๋ฅ™ +๊ด€๊ฑด +์ธ๋‚ด +๊ผผ๊ผผํžˆ +ํ˜ธํ‰ +๋ฉ”๋”” 
+##๊ฐ€์™€ +์ง„์ •ํ•œ +์„ฑ์ฐฐ +๋งŒ์  +ํ‘œ์ถœ +์œ ์ฃ„ +์ทจํ•œ +์ง์Šน +์•„๋ฒ„ +์†Œ๋‹ˆ +์–‘๊ทน +ํ–‰๋ ฌ +์ง€์—ญ๋ฏผ +ํ† ๋งˆํ†  +๋ณถ์Œ +##LS +๋ฐฉํ†ต +ํ™”์ƒ +##๊ทœ๋ชจ +๋„๋งˆ +ํ•œ์ธ +๋ฐฑ์ง€ +ํƒ๋ฐฉ +ํ•ต์‹คํ—˜ +์•ˆ๊ฑด +์ค„๊ณง +##์ด์น˜ +08 +์˜ฌ๋ ˆ +์‹œ์† +183 +ํ”„๋กœ๋ชจ +์ฃผ๊ต +##๊ด€์œ„ +์ฒด๋ฅ˜ +159 +๊น€๋ฌด์„ฑ +์žฅ๋‹จ +์•„๋ฅด๋ฐ”์ดํŠธ +๋ฆฌ๋ฒ„ +์Šฌํ”ˆ +์—ฐ์†Œ +##์ปค๋…• +์„คํ™” +ํ—ฌ๊ธฐ +๋นผ๋†“ +ํ•จ์ • +##๋‚˜๋ฌผ +๊ณต์‚ฐ์ฃผ์˜ +์†Œ์„œ +์ˆ˜๋ชฉ +##๋ณถ์ด +๊น€์ฃผ +์Šคํƒ€ํŠธ์—… +๊ฒฌ์ธ +๊ธ‰ํžˆ +๋งˆ์ฐฐ +๋ฏธ์Šคํ„ฐ +๊ธฐ์ธ +๋Œ์ด +ํ›Œ์ฉ +ํ™”๋ ฅ +ํด๋ผ์šฐ๋“œ +์ „์—ผ +๊ณ ๋… +๋ถˆ๋ ธ +๋‹๋ณด์ด +๊ธ‰๋ฝ +์„œ๋ฅธ +์ˆ˜๋‹ค +๋ฒ„์„ฏ +๋ฐฉ์ถœ +์‹ค๋ฆฌ์ฝ˜ +์ ‘๊ทผ์„ฑ +์ฐจ์ต +๋ณด์ขŒ๊ด€ +์ ‘์‹œ +์ €์ˆ  +์œผ๋‹ˆ๊นŒ์š” +๋„์žฅ +๋ฏธ๋„ค +##ivers +##์„œ๊ตฌ +์…”์ธ  +๋ฏธ๋„ +์‹ธ์ด +์•„๋‹ˆํ•˜ +##๊ฐ™์ด +ํœด๊ฒŒ +์„ฑ์žฅ์„ธ +์ˆ˜์ œ +1969 +##์ถ”์–ด +๋™๋ฌธ +๋ง๋ช… +๊ทธ๋ ธ +๋“ค์–ด์„ฐ +์‚ผ์ง„ +๋ชฐ๋‘ +ํฌ๋“œ +์ „์‚ฐ +์ดˆ๋ณด +๋…ธ๋ ค +์ธ๋ช… +๋ณผ๊นŒ +์ง€์นญ +ํ›„๊ณ„ +๊ฐ€์ ธ์™” +๋ฌธ์˜ˆ +ํ—ˆ๋ฌผ +##๊ณ ์‚ฌ +๊ฐ์‹ค +##ํ…Œ์ด์…˜ +์˜์ • +๋‹จ๊ฐ€ +๋งˆ์…จ +์˜์˜ +๊ธฐ๋ฆฌ +๋ชจ๋ฅธ๋‹ค๋Š” +์ „๋ฌธ์  +์‹œ์นด๊ณ  +112 +๋œจ๊ฑฐ +๋ฐ•ํƒœ +์ด๋ ฅ +ํœฉ์“ธ +์œ ๋‹ˆํผ +์ด๊ด‘ +ํฐ์ƒ‰ +๊ฒŒ์‹œํŒ +์œก๋ฐ• +๋ฏผ์กฑ์ฃผ์˜ +๊ด€๊ณ„์—†์ด +ํ”„๋ฆฌ๋ฏธ์–ด +๊ทธ์ชฝ +๋ฐœํŒ +ํ…Œ์ผ +๋ง›๋ณด +๋ชจ๋ฐฉ +LCD +์กฐ์ง +์ฃผ๋ฌด +CO +๊ฒ๋‹ˆ๊นŒ +์–ด์งธ +์†œ์”จ +##son +์ž๋ž€ +์ž์ง„ +##๋™์ž +๊ฐ€๋กœ๋ง‰ +์†๋ชฉ +๊ณ ์ž‘ +๊ณต๊ฐ๋Œ€ +๋ณ„๊ฐœ +ํŒํƒ€ +##๋ ค๊ณ  +์น˜๋ฃŒ์ œ +##hy +์„ ์žฅ +๊น€ํ˜• +๋ฌด๊ฒ +์›๋กœ +๋ง๋ฏธ +##๋ถ€์ธ +๊ฒฝ์ƒ๋ถ๋„ +๊นŠ์ˆ™ +์ฐฝ๊ตฌ +๊ณ ์–‘์‹œ +๊ณ ์†๋„ +์ง€์ฒด +์˜์„ธ +##์ˆ˜๊ธฐ +๋ ค๋˜ +๋‹ค์ด์•„ +##๋ฐฉ์†ก +##๋ฒ ์Šค +์ž ๋“ค +ํƒ์ง€ +์ฐธ์„์ž +##์กฐ์„  +๋ช…์‚ฌ +์ปจ๋””์…˜ +๋– ๋“ค +์€๊ทผ +์žฌํ…Œํฌ +๋งˆ์‚ฌ์ง€ +๋ฒจ๊ธฐ์— +์–ด๋•Œ +##ost +๋ธŒ๋ฃจ +๊ธฐ๋Ÿ‰ +๋‘๋ถ€ +์ •๋™ +๋Œ€์ค‘๊ตํ†ต +ํ—๊ฐ€๋ฆฌ +๋ถˆ๋น› +๋กœ๊ณ  +##ํˆฌ์Šค +๋ณต์‚ฌ +๊ณผ์‹ค +๊ฒ€์ถœ +๋ณด์Šต +์ˆ˜๋ฒ• +ํ›„์ž +๋ฐ•์ƒ +์—ผ์ฆ +์Šค๋ฌด +ํ•˜์ธ +์žฅ๋ž˜ +##์„ผํ‹ฐ๋ธŒ +๊ฐ๋…์› +์ธ์„ผํ‹ฐ๋ธŒ +๋‹ค๊ฐ +##์†Œํ†ต +์†Œ๊ทœ๋ชจ +ํƒ€์„  +๋งˆ์šด๋“œ +์„ธ์ž… +##๋ฌผ์‚ฐ +์ดํ˜„ +๋ถˆ์พŒ +ํ‘œ๋ณธ +ํŽธํ•œ +##TI +๋ณด๋“œ +์ƒ๊ด€์—† +๋‚ด์žฌ +ํ™œ์ง +์ง€์ผฐ +๊ฒฝ๋ณด +๋‘๊บผ +๋งˆ๋ฅธ +##AC +MOU +์—ญํ•™ +SUV +๋‹ฌ๋ผ์ง„ +๋‚ด๋‹ค๋ดค +ํ”„๋กœ๋ชจ์…˜ +๋…ธ๋ฅด์›จ์ด +์€๋ฐ์š” +์–ด๋ฏธ +๊ฒฐํ•จ +์ฃผ๋‘” +๋ฉด๋ชจ +1963 +๊ฒฝ์„ฑ +๋ฉˆ์ถฐ +ํ™ฉ๋‹น +์ฒ˜ํ˜• +์ ธ์„œ +๋งˆ์ด๋„ˆ์Šค +์น˜๊ณผ +์•„๋งˆ์ถ”์–ด +์•ž๋‹น +๊ณค์ถฉ +##ํ…Œ์ผ +ํ•ด๊ฒฝ +PR +๊ฒฉ๋ ฌ +๊ฐ€ํ•ด์ž +์„ ๋ณด์ผ +์ฐฝ์—…์ž +๋ฐœ๋ น +์–‘ํ•ด +์ค‘๊ตญ์–ด +UE +ํ›„์ง€ +๋งํˆฌ +์•„์‰ฌ์šด +์ด์ง„ +์„ฑ๋‚จ์‹œ +์ง‘์ค‘๋ ฅ +์ฆํ›„ +์‚ผ์„ฑ๋ฌผ์‚ฐ +์˜์‚ฌ์†Œํ†ต +์ด์‹ +์‹ค์žฌ +09 +is +์ผ๋Ÿฌ +๋–ก๋ณถ์ด +์‹ค๋ฆฐ +์›ํ•œ๋‹ค +##๊ฐ์น˜ +์ „๊ฒฝ +์ฃผ์˜์ž +์ •์ „ +์ฒด๊ฐ +๊ธธ๋Ÿฌ +##์ฐŒ๊ฐ์น˜ +๊ธฐ์ˆ™์‚ฌ +๋ณธํšŒ์˜ +์—ฌ๊ฒผ +ํ™์„ฑ +์ด์•ก +๊ดœํžˆ +##๋ธ”๋ฆฟ +##๋ฒ ๋ฆฌ +๊ณต๋ฃก +์ž„์šฉ +๋ชฐ๋ฝ +์ฃผ์‹ํšŒ์‚ฌ +์‹œํฅ +๋งˆ์‹  +๋‚˜์ดํŠธ +์ž…๋Œ€ +##๊ฒฝ๋ถ +##her +##๊ท ๊ด€ +๊ทธ๋ ˆ +์–ดํœ˜ +๋…ธํ™” +์ค„์ค„ +์ด์‹œ +์–ธ์Šค +๋Œ€์„ฑ +์ž์žฌ +KA +์™•๋น„ +์ง€๊ทนํžˆ +ํ•ด์ง€ +๋งˆ์ผ +์ผ์ƒ +์ค˜์„œ +105 +๋งŒ์ง€ +##๋งˆ๋‹ค +##์šด์ „ +๋Œ€๋ฌธ +๋ฐฅ์ƒ +##ํ˜ธํ…” +ํƒœ๊ถŒ +๊น€์ค€ +๋ฐ”์ด์˜ฌ +์ž”์ธ +ํฌ์œ„ +##์ง•๊ธˆ +๋†์–ด์ดŒ +##์˜ค์นด +์ •๋ชฝ +##์™€์ด +๊ณผ๋‹ค +์ •๋ฌธ +##๊ธฐ๊ตฌ +ํšŒ์ƒ +์น˜์•ˆ +์˜๋“ฑ +๋ณด์ปฌ +์„ฑ์šฐ +๋ฐฉ๋ฌธ๊ฐ +๊ตฌํ•  +์‚ฌ์‹  +ํŽธ์ฐฌ +์œผ๋ผ๊ณ  +##์ณ์„œ +##ies +์ €์ง€๋ฅธ +์žŠ์–ด๋ฒ„ +##์‹ธ์›€ +๋ฌธ๊ฒฝ +์„ฑ๊ท ๊ด€ +์ƒํ‘œ +1967 +์Œ์› +๊ฐ•ํ˜ธ +์ง€๋ฆ„ +ํŠ€์–ด๋‚˜ +์ด๋ชฉ +์—ฌ๋Œ€ +์ฃผ์ถ• +ํ–ˆ์œผ๋‹ˆ +์žƒ์–ด๋ฒ„๋ฆฐ +๋ฐฉํ•œ +ํ…Œ๋‹ˆ์Šค +##๋…ธ์Šค +##์˜ํ–ฅ +์ผ๋ณธ์–ด +ํ•ดํ”ผ +์ œ์•• +์Œ์‹๋ฌผ +##๋‚˜์ดํ‹ฐ +ํฌํ†  +##๊ธฐ์ง€ +์š”๊ธˆ์ œ +์ž‡๋‹ฌ +์ œ๋„ค +์‹ค์ƒ +##๋‹ค๋“ฌ +##ical +๋‹จ์„œ +์˜ค๋””์…˜ +๊ฐ€๊พธ +##eal +์›์‚ฐ +์œ ๋‚˜์ดํ‹ฐ +์žฅ๋กœ 
+์‹œํ–‰๋ น +ํ‡ด์ง„ +##ํƒœํ”„ +๊ฐ€๋ญ„ +๋™๊ฒฐ +๋‚˜๋ญ‡ +์ „๋‘ +ํƒฑํฌ +##oy +ํ›„์ž„ +ํ™”๋ณด +์—ฌ๊ฐ€ +ํ‹ฐ์…”์ธ  +์ €์ ˆ +์šฐ์‚ฐ +๋ฐ”๋กœ์žก +##์—ฐ๋Œ€ +๊ตํ–ฅ +์ฃผํŒŒ +์ง•์ˆ˜ +๋งˆ์ธ๋“œ +๊ฐ•์ˆ˜ +ํด๋ ˆ +๋‚ฉ๋“ +๊ณต๋ก  +์ด๋ถˆ +๋ ˆ๋ฒจ +๋Œ€๋™ +์ •ํ–ˆ +์›๋ง +์‚ฌ๊ด€ +์ด์„ +์†๊ผฝ +##orm +๋™์ƒ +์†Œ์•„ +ํƒ€์ˆ˜ +์ˆ˜๋น„์ˆ˜ +๊ฐ๋ณ„ +๋‚ดํฌ +์ €์ ˆ๋กœ +๋‚ ์นด๋กœ์šด +##MC +ํšŒ์˜์‹ค +์ƒŒ๋“œ +์œ ์พŒ +์ค‘์†Œํ˜• +๊ธˆ๊ฐ•์‚ฐ +๋ถ„๋น„ +๋…ธ๋ผ +์•ฝ์ • +๊ธˆ์—ฐ +๋œ์žฅ +ํšจ์†Œ +##ert +๋ฏธํ•™ +์บ์ŠคํŒ… +##ํ˜‘ํšŒ +์ˆ˜๋ฐ˜ +์“ฐ์ธ๋‹ค +๋ณ€๋ช… +๋™์‚ฌ +์ฐจ๋ฆฌ +๋ฐ•ํƒˆ +์„ธ์ˆ˜ +์ง€์ผœ์•ผ +##์ œ์ฒ  +์ด์ฒด +๋ž๋‹ˆ๋‹ค +ํฐ์ผ +##10 +์žฌ์ˆ˜ +##๋ผ์ด์–ธ +๋ฉ”๊ฐ€ +์›€์ง์ผ +๋ ฅ์‚ฌ +๋ฐ€์ฐฉ +์—ฌ๋ณด +1200 +์ˆ˜ํ‰ +์ž”๋”” +์ „์›” +์„ค๊ต +์ƒ๋ฅ˜ +ํ•ญ์Ÿ +##๋งค๋งค +##ํ‹€๋žœ๋“œ +์ตœ์šฐ์ˆ˜ +๋ถˆ์Œ +์žฅ๊ฑฐ๋ฆฌ +ํ–‰ํ•ด +์„ธ๋กœ +๋“œ๋ฌธ +ํ•™๊ธ‰ +๊ณ„ํ†ต +๋ฅด๋„ค +๊ฒฝ์ธ +์šฐ๋“œ +์—ํ”„ +ํƒœ๋ธ”๋ฆฟ +๋งˆ์„ธ์š” +์›ฌ๋งŒ +DJ +๋งˆ์ˆ  +๋งˆ๋“œ๋ฆฌ +ํˆฌ๊ตฌ +ํ•™์‚ฌ +๋ฐ”์น˜ +๋‹ค๋‹Œ +์˜ˆ๋ฏผ +์‹ ์กฐ +์ฐฝ๋‹น +ํ•œ์ž” +๊ณ ๋งˆ์›Œ +์ฃฝ์—ฌ +ํŒŒ์Šคํƒ€ +์˜จ์‹ค๊ฐ€์Šค +์†Œ์•ก +์•ฝ์„ธ +๋ถ€ํ‰ +๋‚œ๋ฆฌ +๋ ˆ์ด์ € +๋ˆ„๋ฆฌ๊พผ +์–ต์ง€๋กœ +์—ฌ์šฐ +ํ•ต๋ฌด๊ธฐ +STX +์„ ๋ณ„ +1965 +UEFA +์„ ์ฐฉ +์Šคํ”„๋ง +์ƒ๊ฒจ๋‚˜ +๊ณ ์˜ +vs +ํ›ˆ์žฅ +์œ ๋ณด +๊พธ๋ ค +์ •๋…„ +๋…ธํŠธ๋ถ +๋งˆ๋“œ๋ฆฌ๋“œ +์‹ญ์ž๊ฐ€ +๋Œ€๋ช… +๊ทผ์ ‘ +๋ฉ์–ด๋ฆฌ +๊ฒฝ์Ÿ์ž +์‚ฌ๋ฆฌ +์ ํ”„ +๊ต๋ฆฌ +์„ค๋“๋ ฅ +##์ง€๋Œ€ +๋ช…๋ช… +์Šคํƒ€์ผ๋ง +๋ผ์น˜ +๊ผฌ์ง‘ +1962 +๊ด‘๊ฒฝ +์œ ๋Œ€์ธ +๋ฒ•๊ด€ +์–ด๋ฆฐ์•„์ด +์–ด๋”˜๊ฐ€ +์šฉ๋ณ‘ +์ „๋™ +๋ฐœ๋ณ‘ +##๋ฒ ํŠธ +์ง€๊ฐ‘ +์Ÿ์•„์ ธ +๋“œ๋ž˜๊ณค +์ฆํ›„๊ตฐ +ํšŒ์ƒ +๋Œ€๊ต +##๋ˆ„์Šค +##์ปค์Šค +์„๋ฐฉ +##์ค„๊ธฐ +์กฐ๋ณ„ +##ark +๊ณ ๋“ฑํ•™์ƒ +์žฌ๋นจ๋ฆฌ +์ค€์ˆ˜ +๊ตญ๋„ +daily +๊ธ‰ํ•˜ +๋„๋•์˜€ +์–‘์ฃผ +๋ฐฉ์šธ +์•„๋ฒ„๋‹˜ +TF +๊ฒฝ์—ฐ +ํ–‰์žฅ +๋‹ค๋“ฌ +๋‹ด์ž„ +AS +๊ธฐ์—…๊ฐ€ +์ถ”๋ฐฉ +์ฑ„์ทจ +1940 +๊ตญํ•œ +์‹œ๋„ +ํ…์Šค +์ค๋‹ˆ๋‹ค +์‹ ๋…„ +๋ญ”์ง€ +๋ฌด์ œ +์‚ฌ์ด๋“œ +07 +ํ›„์ง„ +์›€์ง์—ฌ +๊ฐœ๊ฐœ์ธ +๊ฒจ์šธ์ฒ  +์—ฐ์ผ +##aw +์†Œ๋ฆฌ์ณค +178 +๋ถˆ๊ธธ +##์ง„๋ณด +์„ค๋งˆ +์ด์™„ +์ „์Šน +KO +์‚ฐ๋งฅ +์ง„ํ•ด +์ตœ์ €์ž„๊ธˆ +์นผ๋Ÿผ +์ค€๋‹ค๊ณ  +##ํ”Œ๋Ÿฌ์Šค +๋ฟŒ๋ ค +์ถฉ๊ณ  +์•”ํ˜ธ +##๋œจ๋ฆฐ +ํ˜ธ์กฐ +๊ฐ์ฐฐ +๋ถˆ์–ด๋„ฃ +๊ท€์—ฝ +๊ฐ•์ž +ํ•ดํ‚น +##ํฌํŠธ +ํ†ต์ผ๋ถ€ +๊ธฐ์ƒ์ฒญ +ํƒœ์šฐ +##orea +๋‚˜๋‰œ +ํ˜๋ € +์ฑ”ํ”ผ์–ธ์‹ญ +๋ฌธ๋‹จ +##์ ์œผ๋กœ +๋ถ™์˜€ +์ด๋”ฐ +์ ‘์ข… +์ž๋ฅด +์‚ฌ์ž„ +๋ฏธ์ฒ˜ +์‹ค์กด +์„ ๋Œ€ +์•„์„ธ +์ด๋ณ„ +##์œผ๋ฏ€๋กœ +์ผ๊ด„ +๋ฒ•์ธ์„ธ +๋งˆ์…” +##ind +2500 +์ง€์›Œ +๊ตฌ๋กœ +์ˆ ์ง‘ +ํ–‰์‚ฌ์žฅ +AT +์นจํˆฌ +ํ™˜ํ˜ธ +##๊ตฌ์„ +##๋ฅด๊ธฐ +๋‚จ๋งค +ํ˜„์ˆ˜ +๋Š”๊ตฐ +์Šคํ„ฐ +๋Œ€๋‘ +๋ฌผ๋ฆฌํ•™ +๋œป๋ฐ– +์ƒ์  +๊ฒฝํฌ +๊ฐ๋„ +์Šคํ…Œ์ดํฌ +์ •์ง„ +##๋„๋ฅด +##๋ฐœ์ „ +ํ•˜๋ฒ„๋“œ +์–ด์น˜ +ํ™”์ž +์„ ๋™ +์ˆจ์ง„ +๋„˜๊ฒผ +๋ฆฌ๋ทฐ +๋ช…์ƒ +์—ฐ๋‚ด +๊ฐ•๊ตฌ +1964 +์†๊ธธ +ํšจ์„ฑ +##์–ด๋กœ +##๊ฒจ์šด +์ž‘์ • +##์…€๋Ÿฌ +๋„๋ž˜ +์กฐ๋ฌธ +๋™์ „ +์ค‘ํ˜• +๊ฐ„์‹ ํžˆ +์ด์ŠคํŠธ +๋ฏผ์‚ฌ +์ด์ผ€ +์ข…๋ง +##sh +์‹ ๋ฌธ์‚ฌ +์‡ ๊ณ ๊ธฐ +๋ฐฐ๋Ÿด +MV +์žฅ๋ณ‘ +๋‚™๊ด€ +๋ชฐ์ž… +๊ฑธ๋ฆผ +์กฐ๋ฆฝ +์น˜๋ฅธ +๋จธ๋‹ˆ +๋ฏธ๋ค„ +๊ฑฐ๋‹ˆ +##EA +์žฌ์ž„ +๋ฐฑ๋‘ +๊ฐ๊ด‘ +์ˆ˜๋™ +๋ฏน์Šค +์ถ”์šด +๊ตฌ์ด +##๋ผ๋น„์•„ +๋ฏธ์…˜ +๋ฐ”๋‚˜ +์ง€์นœ +๋งˆ์Œ๊ป +ํฌ์ˆ˜ +์œ„์กฐ +##ํ™”๊ธฐ +์‹ฌ์‹ฌ +์‚ด์•„์˜จ +ํ™”์‚ฐ +๋„๋ฏธ +ํ˜„์กด +์žฅ๋ก€์‹ +์ฒญ์ค‘ +๋ฏธ๋ฏธ +๋‚จ๋ฏธ +์—ฐ์–ด +๋ฒ„๋”” +##๋Œ์ด +๊ฒฝ๋ถ€ +##๋ฐํƒ€ +##์†Œ๋…„ +์ถ”์‹  +##ace +ํ• ๋ฆฌ์šฐ๋“œ +๋น ์งˆ +๋šœ๊ป‘ +๋ถˆ์ƒ +Sch +##oT +๋ฐ•์›์ˆœ +๋ฒ ๋ฆฌ +๋ž€๋‹ค +๋‚ด๋‹ค๋ณด +์ ๋ฆฌ +ํ† ์–‘ +์šฐ์›” +๋ถˆํŽœ +ํ”„๋ ˆ์Šค +์ธ์ง€๋„ +##๋„์–ด +##๊ฒน์‚ด +๋ผ๊ตฌ +๋Œ€ํ”ผ +๋•Œ๋ ค +๊ณ ์ฐฐ +ex +๋‹น๋‡จ๋ณ‘ +##๋ฃจ๋ฏธ +๊ณตํŒ +์กฐํ˜• +์ผ์ฐ์ด +์ด๊ฒฌ +๊ตํ†  +ํด๋กœ +์งˆ์ฃผ +์†ํฅ +์ค‘ํ•™์ƒ +ํ…Œํฌ๋…ธ +๋งˆ์‹ค +์šธ๋ ค +์ด์ƒ‰ +##๊ธฐ์•ผ +๋ถˆ์ฐธ +์œผ์„ธ์š” +๋ถ„์ˆ˜ +##๋ถ€๋ถ„ +##๋†’์ด +ํ•„๋ฆฝ +CS +์„œ๋ธŒ +๋˜์ง„ +CP +๋ช…๋™ +์•„์ฃผ๋จธ๋‹ˆ 
+์ ˆ์ œ +๊ฐ€ํŒŒ +๊ฒฝ์ƒ๋‚จ๋„ +์ฟ ๋ฐ” +๋‚ ์•„๊ฐ€ +์ƒค์›Œ +์‚ผ๊ฒน์‚ด +์ฃผ์–ด์ง€ +์Šค๋Ÿฌ์›€ +์ด๋ฃฌ๋‹ค +๊ฐ€๋Š  +์›์ž์žฌ +์†ํฅ๋ฏผ +๋Œ€์š” +ํ•˜์ด๋‹‰์Šค +์ž…๊ฐ +๋งํฌ +๋‹ค์Šค๋ฆฌ +##๊ณ ์†๋„๋กœ +์นจ๊ณต +๊ด€์ง +์žฅ๋‚จ +ํžˆ์–ด๋กœ +๋‘๋‡Œ +์บ ํ•‘ +์˜คํ‚ค +##ํ”ผํƒˆ +ํฌ๋‹ค +์ง€๋‚œ๋ฒˆ +์ž…์ˆ˜ +๋ฒ„๊ฑฐ +##์ธ์ง€ +๋‹ˆ์Šค +๊ธฐ์—…์ฒด +๋กฏ๋ฐ๋งˆํŠธ +##ํ•˜๋ฃจ +๋ฐฉ๊ณผ +1946 +##์…€๋กœ๋‚˜ +์ง€ํ‚ฌ +ํ’์† +๊ฒŒ๋” +์‹ค๋ช… +๋‹ฌ๊ฑ€ +##orld +๋ชจํ†  +๋ฐ”๋ฅด์…€๋กœ๋‚˜ +๋ฒ•ํ•™ +์ฒญ๋‚˜๋ผ +๋ฌธ์ฒด +##์‚ฌ๋žŒ +๋ณต๋„ +##์˜ค๋ฆฌ +์„ธ๊ณ„๊ด€ +์ผ์ฐŒ๊ฐ์น˜ +๋ฐ•๊ณ„ +##ํผ๋Ÿฐ์Šค +ํšก๋‹จ +ํ•œ์‹ +์‹ ๋‹ค +์ž…๋ฌธ +๋ธŒ๋ ˆ +ํผ์‹œ +์ทจํ•  +์›์ฃผ๋ฏผ +์ถฉ๋‹น +ํ•จ์–‘ +1966 +Bl +์Šค์œ™ +##๋ ˆ๋‚˜ +๋‚ด๋ฅ™ +pr +๋ฃจํŠธ +์œ ์˜ˆ +์ถ•๋ณต +๋Œ€๊ณต +๋ƒ‰์ „ +๋ฒ•์ œ +๊ตฐ์‚ฐ +๊ณต์‹  +ํšŒ๋กœ +ํŒฝํŒฝ +ํญ์—ผ +ํฌ๋ ˆ +๋ผ์ด๋ฒŒ +์Šน๊ณ„ +๋Šฅ๋™ +์ƒ‰๋‹ค๋ฅธ +SC +์ผ๋ช… +๋‚ด์„ธ์›Œ +ํฌ์Šคํ„ฐ +์†Œ์‹  +๋‹ˆ์ฝœ +๋กœ์›€ +์‹œ์กฐ +ํ™ˆํ”Œ๋Ÿฌ์Šค +๋‹จ๊ณจ +##๋””์›€ +์—๋ฅด +๋ชจ๋“ˆ +์‚ฌํšŒ๋‹จ์ฒด +102 +์ง„ํ•œ +sp +์˜ˆ๋ป +์ด์ค€ +์ผ์ปซ +๋ชจ์š• +ํŒ๊ต +##์ง„๋ณด๋‹น +์ „ํ•œ +์ข…๊ฒฐ +์†์ˆ˜ +Sc +##RA +์ˆœ์กฐ +์ฟ ๋ฐํƒ€ +##MA +๋ฐ•์ง€์› +์Šต๋“ +์—ฌ์ • +๊ด€๋‚ด +๋‹ค์ €์Šค +ํžˆํ‹€๋Ÿฌ +๊ฐˆ์ƒ‰ +IoT +๊ฐ•๋ฌผ +์ƒ๋ช…์ฒด +๊ฒฐ๊ณผ๋ฌผ +๋…ธ๋ถ€ +##IST +๋ฒ—์–ด๋‚œ +์ง€๋ฉด +ํ›„์œ  +ํƒ€์ด๊ฑฐ +๋ถ€์† +๊ฒฐ์‚ฐ +ํํ•˜ +ํ’€๋ ค +๊ณต์˜ˆ +์ „ํˆฌ๊ธฐ +##๊ณต์ • +๋งˆ๋ƒฅ +ใ†์ค‘ +๋จธ๋ฌผ๋Ÿฌ +๊ณต๊ตฌ +์ƒ์‹œ +์˜์žฌ +##์‚ฌ์ด +์žฌํŽธ +##te +์ €๋งˆ๋‹ค +ํŠธ๋ ˆ +๋ฐ›์•„๋“ค์˜€ +##๋Œ€๊ธฐ +์™ธ๊ด€ +##์—ฌํ–‰ +๋…นํ™” +๋ฌด์ˆ˜ +##๋ ˆ์ผ๋ฆฌ์•„ +1961 +ํ†ต์ฐฐ +์žฅ๊ฐ‘ +๋ฒ„ํ„ฐ +์™ธ์ƒ +๋ณผ๋„ท +๋ผ๋“ ๊ฐ€ +##๊ทธ๋ฆฌ +์Šค์บ” +๋ฆฌ์ฒ˜ +์ •์„  +์ฆˆ์Œ +108 +ํƒ€๊ณ  +์• ๋งค +๊ตญ์™ธ +๋™์  +##ah +์žฅํŽธ +์ถ”์‹ ์ˆ˜ +์˜ฅ์ˆ˜์ˆ˜ +์ฒญ๋ ด +์žฅ๋งˆ +ํšŒ๊ณ  +ํ•ด๋ฆฌ +๋”์œ„ +์ง€๋ฆฌ์‚ฐ +ํ‘œ์ฐฝ +##ํ‚คํผ +์นด์ง€ +๋งค๋ฌผ +์ฐจ์ข… +##ick +CN +be +##์‹œ์Šค์ฝ” +ํ•œ๋ณต +##ty +##ts +๋ณ€ํ˜ธ์ธ +๋„๋‹น +tv +์šธ๋ฆ‰ +์‚ฌ๊ฑฐ๋ฆฌ +ํƒˆ๋ชจ +์‚ฌ๊ท€ +์œ ์ต +๊ธด๋ฐ€ +์ž…์–‘ +๋„๋„๋“œ +๊ณจํ‚คํผ +ํ•˜ํ–ฅ +์ •์  +##์—์Šค +##ํŒŒ์ด +์•…์žฌ +ํ•‘๊ณ„ +์ˆ˜๊ฐ +ํŽ˜๋ฏธ +๋‚จ์ง“ +์†Œ์ƒ +์‡ผํŠธ +##TO +ํ•ด์ˆ˜์š•์žฅ +๋ฌ˜์ง€ +๋ฏผ์„  +์‹œ์ง„ +์‹ ์ง€ +ํœ˜๋ฐœ +๊น”๋ ค +์ „์žฅ +๋ชจ์–‘์ƒˆ +ํ˜„๋Œ€์ธ +๊ฐ€๋ฏธ +์น˜๋ € +๋…์ฐฝ +๋ฒˆ์˜ +182 +์ ๋‹นํžˆ +๋„“ํ˜€ +์›จ์ŠคํŠธ +๋ฒค์ธ  +๊ตญ์ œ๊ณตํ•ญ +ํ•„ํ„ฐ +ํ†ตํ•ฉ์ง„๋ณด๋‹น +๋Ÿฌ๋‹ +๋Œ€๊ตฌ๊ฒฝ๋ถ +์„œ๋ฌธ +์‹œํฌ +๊ณ„๋ชฝ +๋น ์ ธ๋‚˜๊ฐ€ +๋Œ์•„์˜ฌ +์‹œ์„ค๋ฌผ +๋ฏธ๋ถ„์–‘ +์Šคํ‹ฐ๋ธ +์‹ค๋ฌผ +๋ˆˆ๋ถ€ +##๋ ‰์…˜ +ํ†ต๊ณ„์ฒญ +##์ง•์–ด +์ž…๊ฑด +๋„“ํžˆ +ํ‰์•ˆ +ํ•ด์˜จ +ํšŒ์ƒ‰ +์ทจํ–ˆ +์ฒด์ฝ” +ํŽผ์น  +๋ฅด๋…ธ +์กฐ๊ทธ๋งŒ +์‚ฌ์ดํด +๋ง‰ํžˆ +์ดˆ๊ณ  +125 +์„ ๊ต์‚ฌ +์ˆ˜์š”์ž +ํŠธ๋ ˆ์ด๋“œ +์ผ๊นŒ์š” +ํŒŒ์šฐ +๋ƒ‰๋™ +์ „๋‘ํ™˜ +์„ผ์Šค +์ด๋… +ํŒŒ์ธ +์ƒํ•˜์ด +๊ฐ•์ง„ +##ug +์งˆ๋ € +์Œ์•…ํšŒ +์ฆ์˜ค +๊ธˆ์„ธ +##๋ฒค์…˜ +์„ ๊ด€์œ„ +๋งŒ์„ธ +์œ ๋‚œํžˆ +์Šน๋ ค +์ถœ์ฒ˜ +##๊ธˆ์ž๋ฆฌ +์กด๋‚˜ +์œ ์Šน๋ฏผ +ํ”ผ๋‚œ +์›Œํ‚น +์ฆ์„ค +๋ฌด์ž‘ +๊ฐ€์„ธ +##ํ•„๋” +๋ฏธ๋“œํ•„๋” +ํŠธ๋žœ +๋ณด๊ธˆ์ž๋ฆฌ +๋™๋Œ€๋ฌธ +ํ• ์ง€๋ผ๋„ +๋Œ€ํฌ +##์„ฑ์ด +์ˆ˜์ฒฉ +๊ฒฝ๊ฐ +๊ณต๋ชจ์ „ +์†Œ๋ฐ• +์šฐ์ƒ +##๋ŸฐํŠธ +๋ง‰๊ฐ• +๋ฏธ์ƒ +##ock +๋ฐ”์ด์–ด +์—ฐ์ดˆ +๋‹ค๋…€์™” +์•ˆ๋“œ๋กœ์ด๋“œ +๋ง์› +ํ•ฉ์น˜ +์ž์น˜๊ตฌ +๋ณต์žฅ +์ž๋น„ +##๋‚˜๋“ค +์ƒ์ „ +ํŠน๊ตฌ +๋™์„ฑ์•  +๊ด€์ œ +๊ธฐ๋“ +ํฌ๋ผ +๋‚จํ•ด +##iss +์ข…์กฑ +์ดˆ์ฝ” +์™ธํ™” +๋ฌธํ•ญ +##one +๋ผ์ธ์—… +๊น€์ฐฝ +์ „์‹œ๊ด€ +๋’ค์ชฝ +ํ‰์ƒ +๋‹ฌ๋ผ์งˆ +ํ•˜ํŠธ +๋น„์ถฐ +ํด๋Ÿฌ์Šคํ„ฐ +##๋ธŒ๋ผ +์›์ˆญ์ด +๋‹ฌ๋ผ์ง„๋‹ค +##IT +์–ด์ด +##๋ชจ๋‹ˆ +๋น„ํ•ด์„œ +Univers +CG +Ex +ํŒจ๋ฐ€๋ฆฌ +๋‹ท์ปด +์•ˆํƒ€๊นŒ์šด +##๋น„์น˜ +๊ตญ์‚ฌ +๋™์ฐฝ +๋‹น์‚ฌ +๋‚จ์‚ฐ +์‹ ์ฐจ +๋ณ€ํ™˜ +ํด๋ฆญ +๊ณผ๋ฐ˜ +ํ…Œ์ดํ”„ +๊ธฐํ”ผ +์งˆ๋Ÿ‰ +๋“œ๋ก  +์˜ค์ง€ +๊ฒฌ๋”œ +์• ์จ +๋Œ€๋‹ด +ํ† ๋ฆฌ +์œ ์ˆ˜ +์•„์ธ +๊พธ๋ฏธ +์ ‘ํ•œ +์ค‘์•™์€ํ–‰ +์†ŒํŒŒ +์™•๋ณต +##io +๊ฒธ์† +##๋“œ๋ ˆ +์ „๋ผ๋‚จ๋„ 
+๋ถ™์ธ +๋‚ด์ •์ž +์„œ์šธํŠน๋ณ„ +๋ช…์ œ +์„ธ์ธํŠธ +์ปจ๋ฒค์…˜ +##๋ชจ์Šต +์žฅํ„ฐ +์–ด์—… +์†์žก +์˜ฌ๋ฆฌ๋ธŒ +์ ˆ์ • +์ฐฐ์Šค +๊ฒฐ์‹ค +์š”๋™ +ํด๋ Œ +์Œ๋ ฅ +๋กœ์ฆˆ +๊ธˆ์ „ +์กฐ์„ ์ธ +##ํ”Œ๋ ˆ์ด์…˜ +pro +์‚ฐ๋ฌผ +๊ถŒ์ต +๋‚ ๋ ค +๊ธฐ๋ฏธ +๋น„๋น„ +์˜ค์ŠคํŠธ๋ ˆ์ผ๋ฆฌ์•„ +๋‚ด์‰ฌ +๊ถ๊ธˆ์ฆ +์‹ฌ์ธต +์ง๊ถŒ +๋…๋ ค +##์ฒด์Šคํ„ฐ +##๋“€์„œ +์‹ค๋ฒ„ +๊ฑธ๋ฆผ๋Œ +์˜ค์„ธ +์ž๊ถ +์—ฌ๊ธด +๊ฒฝ์˜ํ•™ +ํ”„๋กœ๋“€์„œ +๋ฌด๋‹จ +๊ฒฝํ’ˆ +์‚ฌ๋ฒ” +##๋ฅดํƒ€ +1949 +##์ƒ๋ช… +๋ฐœ์†ก +๊ทผ์ ˆ +๊ธฐ์ € +๋ชจ์ž๋ผ +์›ํ”ผ์Šค +๋ฐ˜๊ฒฉ +Joh +์™ผ์† +๋‹น๋‚˜๋ผ +์•ผ๊ตฌ์žฅ +๊ธฐ๋…๊ด€ +๊ฐ€์ ธ์˜จ +์šธ์‚ฐ์‹œ +๋‘๊ป˜ +๋งŒ๋ฌผ +ํ‰๋ฉด +๋Œ€๋ณธ +##ํ‡ด๊ทผ +##ํ•˜๋ผ +๊ตญ๊ณ  +๋นˆ๋ฒˆ +์†Œํญ +์š”๋ น +์•„ํ…Œ๋„ค +์—ฌ๋ฐฐ์šฐ +๋ฐ•์ง„ +##ement +Press +์ƒดํ‘ธ +์ •๊ด€ +๋‚˜๋ฅด +##๊ฒฝ์ œ +๋ฉ‹์žˆ +๋น„๋Œ€์œ„ +์—ฌ๋™์ƒ +ํ‡ด์ถœ +์„œ์žฌ +๋‚ด๋…„๋„ +##ํ…Œ๋ž‘ +์นผ๋ฆฌ +์—๋ฒ„ +๊ฒฝ์ง€ +๋ผ์ง€๊ณ ๊ธฐ +์ฃผ์ €์•‰ +๊ฐ‡ํ˜€ +##๋ผ๋ฏธ +๋ถ€์˜ +์‚ฝ์ž… +๊ฐœ๋ฏธ +์œ ์„  +๊ทธ๋Ÿฌ๋‹ค๊ฐ€ +์‘๋ชจ +์ •๋ฅ˜ +์ฝ”๋ ˆ +360 +๋ฏผ์˜ํ™” +ํ•˜์™€์ด +๋‚จ๋‹ค๋ฅธ +๋‹คํฌ +๋ฌต๋ฌต +์‹œ์ผ +๊ตญ์ง€ +์ •๋ณด์› +์ฐจ์„  +์•„์›ƒ๋„์–ด +ํ†ต์ง€ +##๋ฆฌ์•ˆ +์›๋‹จ +ํ•˜๋ฝ์„ธ +๋’ค์ง€ +๋“œ๋ฆฐ +์›ํ–ˆ +์„ ์  +๋ฏผ๋ฒ• +์‚ผ์„ฑ๊ทธ๋ฃน +๊ณผํƒœ +๋งˆ์ฐจ +์ œ์ž‘์‚ฌ +์ „๋… +์Šฌ๋กœ๊ฑด +๊ตฐ๋ฏผ +ํ™”์„ +๊ฐœ๋Ÿ‰ +๋„˜์–ด์„  +๋ฐ”๋€” +ํ‚ฌ๋กœ๋ฏธํ„ฐ +๋…ผํ‰ +์‚ฌ์‚ฌ +##lic +##DA +๊ธฐ๊บผ +๋™์š” +๊ณ ๊ตฌ๋งˆ +์›”๊ฐ„ +๋†์ง€ +๋ฒˆ์ฉ +๋ฌดํ˜• +์‚ฌ์œ„ +๊ณผํƒœ๋ฃŒ +์ด๊ฒผ +์ด๋ฃฉ +##90 +์–‘ํ˜ธ +๋”๋“ฌ +๋ฒ—๊ฒจ +์ง€์Œ +##ach +์—ด๋ง +์ง€์งˆ +๋‚ด์ „ +Man +sh +์‹ค๋กœ +์•ผ์‹ฌ +##์ง€๋ฅด +ํ‡ด์ž„ +๊น€์œค +๋“œ๋ผ์ด๋ธŒ +ํ‘œ๋ฐฉ +์˜๋Œ€ +์ด๋ˆ +##mp +์ง‘์–ด๋„ฃ +๋ถ„ํ™” +์—ฐ์ฒด +ํŒŒ์Šค +์น ๋ ˆ +์˜ํ™”๋ฐฐ์šฐ +์ ˆ๋ฒฝ +ํ† ๋„ˆ +๋„๋งค +ํ• ๊นŒ์š” +์•„๋น„ +์–ด๋จธ๋‹˜ +๋ง‰ํ˜€ +##์Šˆํƒ€์ธ +์œ ์–ธ +์ฐพ์•„์™€ +๋ฒ—์–ด๋‚  +์„ฑ๋งค๋งค +๋‘๋ ค์›Œํ•˜ +์œ ๊ด€ +๋งŒ๋‘ +์ ๊ธˆ +์Šค์ฝ”ํ‹€๋žœ๋“œ +Pl +๊ณ ์‹ฌ +Ed +์†ก์ด +๊ท€ํ•œ +์ƒ์ž„์œ„ +์Šน์ž +๋กœ๋งจ์Šค +๋ฌด์•ˆ +ํ•ธ๋“œํฐ +์กฐ๋ง +์ผ์œผํ‚ฌ +##๋ฒŒ์ด +์ œ์ž๋ฆฌ +##๋ฌด๋ผ +ํ•ฉ๊ฒฉ์ž +์‹ ์šฉ์นด๋“œ +๊ถŒ์žฅ +##์†Œ์‹œ์—„ +ํƒœ์ข… +๊ธธ๊ฑฐ๋ฆฌ +EBS +์ž ์ˆ˜ํ•จ +ES +์ปจ์†Œ์‹œ์—„ +๋ฏธํŒ… +์—ฐํ•ฉ๊ตฐ +์ด์‚ฐํ™” +ํƒ€์„ +ํƒ€๊ฒฐ +๊ฐ„ํ–‰ +ํฌ๋ฏธ +์‹ธ์›Œ +์œ ๋Ÿฝ์—ฐํ•ฉ +๊ท€์ค‘ +##ํฌ๋ฆฌํŠธ +๊ถ์ „ +๋งŒ์ฐฌ +์ฆ๊ฐ€์„ธ +##์—‘์Šค +์ •์‚ฐ +์™œ๋ƒ๋ฉด +๊ฐ•๊ตญ +๋ฐ์Šค +๊ต๋„์†Œ +๊ฑฐ์•ก +##๋ผ๋…ธ +์„œ์šธํŠน๋ณ„์‹œ +์ƒ๊ณ  +์„น์‹œ +##ํ™”๋ฌผ +๊ผฌ๋งˆ +๋ธŒ๋ ˆ์ดํฌ +KCC +์‹œ๊ณต์‚ฌ +๋ณธ๋ฌธ +์˜์™• +์ฃผ์ƒ +ํ’๊ธฐ +์Œ“์ด +์„ธ์† +##๋งˆ๋ฅด +##๋ ˆ์ดํ„ฐ +์—ญ์„ธ +์ง€ํœ˜์ž +๊ตฌ์‹ค +๋–จ๋ฆฌ +์‹œํ•œ +ํ•˜์ฒญ +๋ถ€์ง€๋Ÿฐ +๋ช…์‹ฌ +์‹œ๊ทธ +๋Š‘๋Œ€ +ํ’๋ ฅ +๋ฃจ์ˆ˜ +์Šค๋ฉฐ +103 +์ €์ฃผ +๋™๋ช… +go +๋ฃจ์Šค +111 +๋ชจ๋ฐœ +##ํ•˜์ด +๋น„๋น” +๋ถˆํ‰ +์˜๊ฑฐ +๋Œ์–ด์˜ฌ๋ฆฌ +์นœ๊ทผ +##๋ฐ์ดํŠธ +์•„๋งˆ์กด +๋‚˜์นด +ํ™์‚ผ +์ง€์ค‘ +๋Œ์—ฐ +๊ฐ•๋Œ€ +๋‚ด์…”๋„ +๊ทธ๋ฆด +๊น€์ธ +์ž„ํ•˜ +##ํ•ฉ๋‹ˆ๋‹ค +Br +์œ ๋ น +๋ฌด์‹ฌ +ํ‘œ์  +##๋‚˜์™€ +๋ฐค์ƒˆ +์ผํ™” +์œ ์ € +ํ”ผ์Šค +์‚ผ์ดŒ +๊ฐœ์› +์ผ์–ด์„œ +๋ฆฌ์ฒ˜๋“œ +๊ธฐ์šฉ +์˜ค์ง•์–ด +์งˆํˆฌ +ํ—ˆ๊ณต +์˜์„ +๋‚˜์ฃผ +์—ฐ์‡„ +ํญํฌ +๊น€ํฌ +๋ฐ”์ง +์›จ๋”ฉ +๊ฑฐ์ฐฝ +##ํ‹ฐ์Šค +๊ณจ์ž +์œค์„ +##ํ”„๋ ˆ +##ํ”„๋ž€ +์ธ๋งฅ +##GA +๊ณ ๋ถ€ +์—…๋ฐ์ดํŠธ +##ํ…Œ๋ฉด +ํž™ํ•ฉ +์ด์ฐฝ +ํŠน๋ก€ +๋‚จ๋ถํ•œ +##ual +์ฃผ์‹œ +๋ฐ”์šธ +ํ• ๋‹น +์˜ค๋ฅธ๋‹ค +๊ฑฐ์Šฌ๋Ÿฌ +์ƒ๋žต +158 +์„ฑ๋ณ„ +๋™๋“ฑ +์•„๋‹ด +##ํŒก์ด +์„ธ๋ก€ +๊ฐ„ํ˜น +ํšŒ์˜์†Œ +๋งˆ์“ฐ +์ฝ˜ํฌ๋ฆฌํŠธ +##๊ตฌ์žฅ +๋ฐœ์กฑ +๋ธ”๋กœ +ํ•ด๋‹ต +์„ฑ์› +์œ ๊ธฐ๋† +์ด์œฝ +์ž ์ž๋ฆฌ +๋Œ€๋ฆฌ์  +๋ฏธ์ฃผ +์ •๋ฐ˜ +๋งŒ๋ฃŒ +๋ถ๋ฐฉ +์Šต์ง€ +##์ ค๋ ˆ์Šค +##๋งˆ๋ฆฌ +๋ฏธ์–€ +๋ฒš๊ฝƒ +์ŠˆํŒ… +๊ทธ๋ผ์šด๋“œ +ํผ์‹œํ”ฝ +##์Šคํ‹ฑ +ํ’์ž +๊น€๋ช… +๊ฐ€ํ˜น +์—ฌ๋Š +ํŒํƒ€์ง€ +##๋„ค์Šค์ฝ” +๋‹ค์ด์•„๋ชฌ๋“œ +##yst +##์˜ฌ๋ž +๊ธฐ๊บผ์ด +ํฌ๋ถ€ +##ํ”Œ๋ ‰์Šค +๊ณ ๋ถ„ +๋ณธ๋ณด +##๋Œ€๊ต +##์ „๋ ฅ +154 +์ŠคํŒŒ์ด +์‹œ๋ฉ˜ํŠธ +๋ฏธ์•ผ +์•Œ๋ฐ” 
+๋“ญ๋‹ˆ๋‹ค +##๊ด€๊ด‘ +์‚ฐํ–‰ +##๋ฐ์š” +์ œ๊ตญ์ฃผ์˜ +๋ฒ…์Šค +์ฃผํŒŒ์ˆ˜ +์‹œ๋‹น +๋Š๊ปด์ง„๋‹ค +ํŠน์ƒ‰ +์ด์œฝ๊ณ  +์นจ์ฐฉ +์‹ค์‚ฌ +ํ”„๋žญ +๋ฐ”๋‹ท๊ฐ€ +์˜๋™ +##๋ถ์ž +์•„๋ฆฌ๋ž‘ +์›๋™๋ ฅ +์•ฝํ’ˆ +๊ฐ•์ „ +์š”๋ ‡๊ฒŒ +์นด์ง€๋…ธ +๋ด‰์‡„ +์ ์ ˆํžˆ +๋‚ฎ์ถฐ +์ฃผ์ง€ +##โ”€โ”€โ”€โ”€ +๋‘˜๋ ˆ +์–ด์•ผ์ง€ +์ง๊ฒฐ +##BA +๊ทน๋„ +์—๋ฆญ +ํญ๊ฒฉ +๋“ฑ์žฌ +์šฐ์ฒด +ํ”ผ์ง€ +1900 +์ƒ‰์ฑ„ +์ฆํญ +115 +์ˆ˜์˜์žฅ +์ค‘์œ„ +##๋ผ๋ฆฌ +##๋””์Šค +๊ธ‰๊ฐ +ed +์ด์šฉ๊ฐ +์€ํ +์—ฐ์ฃผ์ž +์ˆ˜๋Ÿ‰ +##์–ด๋จน +์žฅ์ž +๊นƒ๋ฐœ +ํ•˜๋‚จ +177 +์ด๋ฐฉ +ํ„ฐ๋œจ๋ ธ +ํŠน๊ธ‰ +์ค‘์ถ” +์ด๋ฅผํ…Œ๋ฉด +์–‘ํ‰ +์šฐ๋ผ +์„œ์—ด +์ž์ทจ +๊ธฐ์Šต +์ƒํ™œ๋น„ +ํฌํฌ +๋ฏธ์“ฐ +ํ…ƒ๋ฐญ +##ใ…œใ…œ +ํ›„๊ณ„์ž +๊ฐ€ํ‰ +์–ดํ•™ +๋น„๋ฒ• +๋ฉ‹์ง€ +์ฒ˜ํ•ด +๋“์—ฌ +๋งจ๋‚  +๊ฐ€์ ธ์˜ฌ +##๋‚˜ํƒ€ +##๋™์› +์ „์ž‘ +์นด๋ฅผ +์Šน๋ฌด์› +์Šคํƒ€๋””์›€ +๋งˆํ‹ด +ํ€ด์ฆˆ +ํ˜ธ์ „ +์šฐ๋Ÿ‰ +๋…ธ์ˆ™ +๋™์กฐ +ํ›„์œ ์ฆ +๊ตญ์ฑ… +๋ฌด์‚ฌํžˆ +์‹œ์•„ +ํ‘œ๊ฒฐ +##๋กœ๋ฆฌ +##์ง€๊ธฐ +์กฐ์žฅ +์‹ ์› +์œ ๋„ค์Šค์ฝ” +์š•์„ค +ํŠธ๋ ˆ์ด๋‹ +##ology +์ต๋ช… +ํฌ์ผ“ +ํ›„๋ฐ˜๊ธฐ +##ign +์•ผ๋“œ +์ฐจ์ด์  +##์ฆˆ๋‹ˆ +Le +์ •์ฃผ +ํ—ค๋ฅด +##ํƒœ์šฐ +์ •ํ˜• +์กด์Šจ +๊น€๋ฌธ +์ง„ํ–‰ํ•  +๋ฏผ์ž +์ผ์กฐ +๊ตญ๊ฐ +๊ณ ๋น„ +์ „๋Œ€ +์žกํžŒ +์„ฑ์ถ” +์ €์กฐ +๋‚˜ํด๋ ˆ +์Œ“์ธ +##๋ฒจํŠธ +๋ฐฐ๊ธ‰ +์„ž์—ฌ +์—ฐ๊ธฐ์ž +๋น…ํ†  +##๊ฐ€์กฑ +๋ฌปํ˜€ +##๋‚˜๋ฆฌ +๋ฐ•์ธ +๋‹ค๋ฌผ +##๋ฒŒ๋ ˆ +##๋ณธ๋ถ€ +์ „์ผ +์•ก์ฒด +##ance +๋ณด์Šคํ„ด +์˜ค๋ฆฌ์˜จ +๊ธฐ์งˆ +Ad +๋งˆ๋งˆ +์ถœํ‡ด๊ทผ +๋ฐ•์šฉ +##ult +๋˜๋ž˜ +##ire +##๋•Œ๋กœ +๊ฐ€๋“œ +๋ถˆ๊ณต์ • +ํ–‡๋ณ• +ํ•œ๊ป +##์ •์‹ +๋‚ฉ์ž… +AR +์˜ฌ์Šคํƒ€ +##๋ฒ”์ฃ„ +๊ฒฝ์Ÿ์‚ฌ +์ฃผ์Šค +๋ณด๊ฑด๋ณต +##๊ฟˆ์น˜ +Ph +ํŒŒ๊ธ‰ +145 +์ƒ๋ฐ˜ +๋งˆ๋ฒ•์‚ฌ +์ข…์† +์ˆญ๋ฐฐ +๋‹จ์—ฐ +์ต์Šค +๋ณธ์  +##ํˆฌ๋ฆฌ +์ •๊ณ„ +์ธํ…” +ํ†ต์—ญ +##์Šคํ…Œ +๋”ฐ๋Œ +๋Œ€์œ„ +MVP +ํƒœ์กฐ +๊ธฐ์ˆ ์ž +##์ธ๋ฌผ +์†ํ•ด +๋…ธ๋…„ +๊ฑฐ๋ž˜๋Ÿ‰ +๊ณ„ํŒŒ +์ง€ํ‰ +์œ ํฌ +##rom +๋ฒ ํ…Œ๋ž‘ +##์ƒ๋ถ€ +์šฐ์ต +๋ณดํƒœ +์˜คํ† ๋ฐ”์ด +๋ถ„์ฃผ +๋‚ด๋ฑ‰ +ํ’ˆ์ข… +ํ›„๋ณด์ž +๋ณด๊ถ +๋ ค๋‹ค +##elf +๋นˆ๋„ +์ž์ • +์‚ฐ์žฌ +์‹ค์ฆ +๋‚ด๋ ค๊ฐ” +์„ฑ์ถ”ํ–‰ +ํŠน์‚ฌ +๋ฐฉ์‚ฌ์„  +์‹œ๋™ +๋ณดํŠธ +๋ˆˆ๋™์ž +๋‰ดํƒ€์šด +์ „๋ก€ +๋ฐฉ์—ญ +์‚ฌํšŒํ•™ +์‹œ๋‚ด๋ฒ„์Šค +๋“ค๋Ÿฌ +ํšŒ์‹ +์‚ฌ์ดŒ +##๋งˆ์ด +์…€ํ”„ +์•ฝ๊ตญ +์ œ๋™ +์ด๋ค„์ง€๋Š” +๋‚˜ํƒ€๋‚ธ๋‹ค +##์™ธ๊ณผ +LS +๋…น์ฐจ +์˜ฅ์ƒ +์ข…๋กœ๊ตฌ +์ผ๋‹น +ํ•ฉ๋‹น +##ike +๊ธฐ๋…์‹ +ํšจ๋Šฅ +์ถ”์ด +์„ธ์ด๋ธŒ +##์ž์œ  +๋ฐ•ํ•ด +ํ•œ๋ผ +๋๋‚  +์ง๊ตฌ +์„ž์ธ +์š”๊ฑฐ +์ˆ˜์‚ฐ๋ฌผ +##ous +##์™œ๋ž€ +์†Œํ’ˆ +์ฐจ์ธฐ +##๋ฒ•์ธ +์†Œ๋ช… +์ฃผ์ถค +๋ชฉ๋™ +ํƒœ๊ทน๊ธฐ +๋‚˜ํด๋ ˆ์˜น +##์ˆ˜๋ฆฌ +๊ตฐํฌ +##๋ณ‘์› +##ton +๊ธฐ์  +๊ณผ์—ด +์ธ๋””์–ธ +์„œ์ดˆ๊ตฌ +์ž„์ง„์™œ๋ž€ +์‹ ์ „ +ํŒ๋ก€ +๋ฒ ํ’€ +๊ฑด๊ฐ€์š” +##ech +##์˜ˆ์ˆ  +๊น€๊ตฌ +๋†์ถ• +๋‚ฏ์„ค +๋ชฉํšŒ +##ํ›ˆ๋ จ +๋ถ์•„ +์•ก์„ธ +##์ƒ์Šค +์€ํ–‰์žฅ +LI +๋‚™ํ•˜ +ํ—ค์ง€ +ํ•œ์ค‘ +์˜ฌ๋ผ์™” +๋Š๋‹ˆ +์—ดํ˜ +ํ˜ธ์žฌ +ํƒ€์ด๋ฐ +๊ณต์งœ +175 +๊ฐ•๋ ฅํžˆ +์‹œํ‚ด +์‚ฌ์ƒํ™œ +๊บผ๋ƒˆ +ํ•ด์—ญ +๋ฆฌ์•„ +์ง€ํšŒ +๋“œ๋Ÿฌ๋‚ธ +##ide +##๋Ÿฌ๋ฏธ +์Šค์ผ€์ดํŒ… +์ˆ˜๋ฃŒ +๋ถ€ํฅ +์„ค๋ ˆ +์–ด๋งˆ +๋™์‚ฐ +ํ•ฉ๋‹ˆ๊นŒ +์ฐฝ๊ฐ„ +๊ต๋ณต +์ž ์ž +๊ฒฉ๋ฆฌ +๋Œ€์—ญ +ํ•„๋ผ +ํ”ํ•œ +##ํ”„๋ž€์‹œ์Šค์ฝ” +๋‚˜๋‰œ๋‹ค +์ฐฌ๋ฐ˜ +์—ด๊ด‘ +xx +์ •์šฐ +ํŒฝ์ฐฝ +๊ณผ์ฒœ +์˜ค๋ฆ„ +๋“œ๋Ÿฌ๋‚œ๋‹ค +์—„์ง€ +Tr +๋‹ค์‹œ๊ธˆ +##๋ฌด๊ฒŒ +##์„ผ์Šค +์ƒŒํ”„๋ž€์‹œ์Šค์ฝ” +##์˜์‚ฌ +๋งฅ์Šค +์˜๋ฃŒ์ง„ +์ปฌ๋ ‰์…˜ +๋ฐ€์–ด๋ถ™ +๋‚˜๋ˆด +์ฝ”๋ฏน +์ง๊ด€ +##ish +์„ธ๋ถ„ +์ˆ˜์›” +ํ˜ธ๋ฐ• +์‹œ์ผœ์„œ +๊ฒฐ์ง‘ +์ด๋ฉด +Gr +ํ•ด๋ƒˆ +##์–ด์Šค +๋ชธ๋ฌด๊ฒŒ +์Šค๋ฏธ์Šค +##๋ฒ ์ด์Šค +##ํด๋Ÿฝ +๋ช…๋‚˜๋ผ +์•ผ๋ง๋กœ +๋ฒŒ๋ ˆ +๊ทœ๊ฒฉ +##์Šฌ๋ฆฌ +by +๋”ฐ๋ž +์•ž๋’ค +##๋ณดํ—˜ +๋Œ€๊ฒ€ +PO +##๋ฆฌ์นธ +์†ก๊ธˆ +157 +์‚ฌ์˜ฅ +์žฌ์•™ +์ฒญ๊ณ„ +##๊ถŒ๋ ฅ +๋งž๋ถ™ +์ธ์œ„ +1956 +์ฒด์กฐ +ํ™”๋ž‘ +๋“ค๋ ค์™” +##ern +##์ฐŒ๊ฐœ +๊ฐœ์‹  +๋ชจ์˜€ +์œก์ง€ +##LED +๋ฏธ์ธ +์‚ดํŽด๋ณผ +์ €๊ฑฐ +๊ด€๋ฌธ +ํƒํ—˜ +๋‹จ๊ธฐ๊ฐ„ +์„ธ์•ก +##์•ค์ ค๋ ˆ์Šค 
+๋‚ด์„ธ์šด +์ฃผ๋‹ˆ์–ด +##๊น€์—†์ด +๋กœ์Šค์•ค์ ค๋ ˆ์Šค +์ด์žฅ +๊ฐ€๋‚˜ +์„ธ๋ธ +๋…ธ๊ณจ +์‚ฐ์—…์€ํ–‰ +2030 +์ค„์–ด๋“œ +##์žฌ๋‹จ +1958 +๊ธˆ๊ณ  +์•„๋ชจ +๋ ˆ์•Œ +๋‘”๋‹ค +์กฐํ˜„ +๊ตด๋Ÿฌ +์žํƒ +์ˆœํšŒ +ํ•™์  +์‹ญ๋‹ˆ๋‹ค +์Šน๋งˆ +๊ฐ€์ด๋“œ๋ผ์ธ +๊น€ํ•œ +์‹ฌํฌ +๋‹ค์ • +##๋ฌด์‹ค +์‚ด์•„๊ฐˆ +๋™์ธ +##ove +์˜ฌ๋ผ๊ฐˆ +165 +์„ฑ๋™ +135 +๊ณตํ‰ +ํ•˜์‚ฌ +์บ„๋ณด +์กฐ์› +์ด๋ค„์ง€๊ณ  +์žฌ๋ž˜ +๊ฑธ์–ด์„œ +์˜ฎ๊น€ +์ถ”๋ฆฌ +##์ž…๋‹ˆ๋‹ค +๋ฅด๋„ค์ƒ์Šค +ํ˜ธ๊ฐ +๋‚œ๋ฐฉ +๋Œ€ํ‘œ๋‹จ +ํฌ์ƒ +ETF +์—ฌํ–‰์‚ฌ +๊ฑธ๋ฆฐ๋‹ค +๋ฆฌ๋ฐ” +์‚ฌ์—…๊ฐ€ +๋ฉด์„œ๋„ +๊ณต๋Œ€ +SW +์›๋…„ +๋ฌธํ•™์ƒ +์ง€์œผ๋ฉฐ +155 +๋ฌด๊ถ +์ฃฝ์˜€ +์›๊ฒฉ +์ฒ ํ +ํ—ˆ๊ตฌ +##oul +๋ถ€์˜์žฅ +ํ—ˆ๋ฌด +์‚ฌํ•™ +123 +์ง„ํ†ต +๋‚ด๋ ค๋‹ค๋ณด +์‹œ๊ตญ +์˜์ˆ˜ +๊ฑฐ๋‘˜ +์‚ด๊ท  +๊น€์œ  +๋ณด๋‹ต +์›Œํฌ์ˆ +๊ตญ๊ธฐ +##ํŽ˜์ด +์„ธํƒ๊ธฐ +์กฐ๊ฒฝ +๊ณ ์•„ +๊ท€ํ™˜ +๋„์ž๊ธฐ +##๋ ์ด +##์ƒ์‚ฐ +์ง์œ„ +์›จ์–ด +์œ ์ƒ +##๋ฒ„๋ ค +๋ถ๊ทน +##๊ฑฐ์ง€ +๋ณดํ›ˆ +์†Œ์ด +450 +์ด๋ฆฌ์ €๋ฆฌ +๋น„์ค€ +์•Œ๋ฃจ๋ฏธ +๋„˜๊ธด +์žฌํŒ๊ด€ +๋– ์˜ค๋ฅธ +์ฃผ์ž… +์†Œ๊ณ ๊ธฐ +ํ…ํŠธ +์ •๊ฒฝ +์œ ๊ณต +##๋ง๋ผ +York +ํ•ด์„  +์ปค์„œ +๋ฌด์„ฑ +์Šน์ฐจ +๋ณด๊ฑด๋ณต์ง€๋ถ€ +PB +์ „์„ฑ๊ธฐ +๊ฐ€์•ผ +๋„๋ฌด์ง€ +John +##LL +์šฐ๋š +์‚ฌ๋‹น +๋•Œ๋•Œ๋กœ +์•Œ์•„์ฐจ +์ˆ˜์š”์ผ +์ฝ”์–ด +์ด์„ธ +##์œ„์น˜ +๋ณ‘ํ•ฉ +##old +์ค„์ค„์ด +University +๋งˆ์Œ์† +์•„๋ถ€ +ํ‘ธ์ง +๋™๋ฉ”๋‹ฌ +์‚ด์•„๋‚˜ +##์›”๋“œ +์ „ํ•œ๋‹ค +์›นํˆฐ +ํ˜•ํ‰ +์ด๋†ˆ +๊ฑฐ๋ผ +์‹œ๋ ฅ +๋ถ€์ ์ ˆ +๋งž๋Œ€๊ฒฐ +์Œ๋‘ฅ์ด +๋นจ๋ž˜ +##๋Œ€๋ถ€ +๋ฒฝํ™” +์‚ฐํƒ€ +์—ฌ๋„ +์ˆ˜์ค‘ +๋‹ค์น˜ +๋ฆฌ์šฐ +๋‚˜๋ฆฌ +๊ตฌ์กฐ๋ฌผ +์‹œ์‹œ +##orn +์•„ํฌ +๋งˆ๋ ฅ +์ด๋ฌด +๋‹น๊ณผ +ํƒœ์ž +์ง„์ž‘ +๊น€์ฒœ +๋ฐฉ๋ถ +์ •์žฅ +๊ณ„๋ช… +ํ•ญ์•” +์•ผ์ง€ +##๋ฐ•์Šค +์˜ˆ๋งค +์Šค๋…ธ +๋ ˆ์ง€ +๋ฒ„๋ฆ‡ +์ˆœ๋ก€ +๊ทธ์ตธ +์ž์œ„ +์šฉ๋‚ฉ +LNG +##๋‚ ๋ ˆ +๋ฐ•์žฌ +์€๋ฉ”๋‹ฌ +๊น€์˜์‚ผ +์ค‘์ง„ +CBS +๊ณผ์–ธ +with +๋ฒ•๊ทœ +์•˜์—ˆ +์ €์ง€๋ฅด +๋Œ€๊ถŒ +๊ณก๋ฌผ +์˜ํ™”๊ด€ +ํ•˜๋„๊ธ‰ +ํƒœ๋ฐฑ +##๊ตญ์  +์‹ ์ฒญ์„œ +์ทจ์ง +์šด๋™๊ฐ€ +์ธก์€ +๊ณจ๊ณ  +๊ณ ์•ˆ +๊ฐ๋ณธ +๊ธฐ๋™ +์‹œ์ง„ํ•‘ +๋ฏธ์–€๋งˆ +๋‚จ์•„๊ณต +๋ณธ๋ช… +์‹ ์Œ +๋ชจ๋˜ +๋ฒ„๋ฝ +์ผ๊นจ +์ปด๋ฐฑ +ํ์—… +1947 +๋ฌด๋Šฅ +์œ ๊ฒฉ +๋ชฌ์Šคํ„ฐ +์„ธ๋‹จ +์บ์ฃผ์–ผ +์บ„๋ณด๋””์•„ +ํ•˜๋™ +ํƒœ์„ธ +##์—ฌ๊ฒจ +์ถœํ•˜ +220 +##๋ฐ•์ด +๊ณผ์ง•๊ธˆ +๊ฐœ์ธ์ „ +๋Š์ž„์—† +##์‹ญ์ž +๋‹ฌ๋‹ฌ +104 +์žฅ๋ถ€ +##์œ ์˜ˆ +์ปจ์…‰ +ํŒ”๋ ˆ์Šค +๋‘ฅ์ง€ +์•„๋ฐ” +์™•ํ›„ +์ฝ”๋ฅด +๋Œ€๋ น +๋‚ ๋ ธ +๋ˆ๋‹ค +1953 +๊นจ๋‹ฌ์Œ +์• ๊ธฐ +์†Œ๋ฐฉ์„œ +์•…์ทจ +๋จธ๋ฌด๋ฅด +์„ธ์‹ฌ +๋ˆˆ๋†’์ด +ํฌ๊ท€ +๋ณต๊ถŒ +๊ฒฝ์‹  +์ซ“์•„ +162 +๊ฐœ๋ฐœ์ž +156 +๋ฐ•๋ณ‘ +๋„์ฃผ +๊ทธ๋ฌผ +106 +์™ธ์‹  +ํŠ€๊น€ +๊ถŒํ•˜ +107 +๊ฐ๊ณ„ +์ถ”๊ธฐ +๊ณต์†Œ +ํŠธ๋ž˜ +๊ด‘๋Œ€ +๋„ํ”ผ +ํ’ˆ๊ฒฉ +๋‚ด๋ณด๋‚ด +ํ™˜๊ธฐ +์˜์กฐ +๋ฐ˜ํ•˜ +์—ฌ์ง„ +์Šค์ผ€์ค„ +๋‹ˆ๊น +๋ฌด๊ฑฐ +##ํผ๋‹ˆ +์•Œ๋ฃจ๋ฏธ๋Š„ +ํ•ญ๋กœ +VR +ํŒŒํ‚ค์Šคํƒ„ +์žฌ์ฐจ +##์ถœ์‚ฐ +์–ด์งธ์„œ +๊ฐ„๊ณผ +์ขŒ์ต +์Šคํƒˆ +ํ‚ค์šธ +์ฝ”์ฝ” +์ค„์˜€ +์ž”์•ก +๊ธฐ๋ถ€๊ธˆ +๊ฒŒ์ŠคํŠธ +๋ˆˆ์—ฌ๊ฒจ +์›ƒ๊ธฐ +์ค‘๋ ฅ +109 +ใ… ใ… ใ… ใ… ใ…  +##์ถ•๊ตฌ +์Šคํฐ +๋ถˆ๋ช… +๋ถˆํƒ€ +์“ธ์“ธ +์•„ํ†  +๊ฐˆ๋ž˜ +์‹œ๋ จ +์ ‘์ „ +์ƒ๊ธ‰ +๋ณด๋ผ +๊ฑฐ์นจ +๋™ํƒ„ +##ann +์—ฟ๋ณด +์ง€๋‚˜๊ฐ” +๊ฐ€๊ธ‰ +ํ”„๋ Œ +๋“ค์—ฌ๋‹ค +์ •๋ฐ˜๋Œ€ +๋ฐœ๋ฐœ +๊ธ‰ํ•œ +##์˜ค๋กฑ +์ˆ˜๊ฑฐ +๋ ˆ์ฝ”๋“œ +๋ชฉ์š”์ผ +์–ด์Šค +ํ•ด๋ฐœ +๊ฒน์น˜ +๋Š˜๋ ธ +์•Œ๋งž +๋ˆˆ์น +์ฐจ์ž… +์•„์œ  +์œ ์ • +CF +๋ฌธ์‹  +์„œ์ธ +ํ—ค๋งค +##์กฐ์‚ฌ +์ „ํ–ฅ +##๋ฉ”์ด +์ •์ • +๋ฃจํƒ€ +๋ฏธ๋‹ฌ +ํ–ฅํ†  +์—ฐ๋ชป +๊ฒน์ณ +๊ฐ•๋‹น +๊ทœํƒ„ +๋ผ๋“ ์ง€ +๋ฐ˜๊ฐ€ +you +์™ธํ˜• +์กฐ์„ ์†Œ +๊ณต์•ˆ +๋ฐ˜๋ฌธ +์ƒ์„  +๋ณธํ†  +์ด๋Œ€ํ˜ธ +์ €๋ฆฌ +์ ํžŒ +##๊ณก๋™ +##ํŽ˜์ด์Šค +๋Š˜์–ด +##์‹œ๋Œ€ +๋‹ค๋‹ค +๋ณต์ข… +240 +๋‹ดํ•ฉ +๊ณต์ฒญ +์Šต๊ฒฉ +๋ฐฐ๊ธฐ +๊ฐœ๋ง‰์ „ +๋Œ€๋ฆผ +ํ‚ค์›  +Car +์„ธ์œจ +์‚ฌ์™ธ +๊น€๋‘ +๋กœ์„  +์ง„์ด +์š”์ฝ” +๋กœ์›Œ +๋‹ฌ๋น› +1959 +ํ”„๋กœ๊ทธ๋ž˜ +๊ณจํผ +๊ณง์žฅ +1910 +๋‚ด์„ธ์›  +์˜์ง€ +##๋ฉด์  +์ž๊ฐ +๋ณด๋ณ‘ +ํ”ผ๊ฒจ +๋ฐœ์ž๊ตญ 
+์˜๋ณ‘ +๋ฐ”๋‚˜๋‚˜ +๊ฑฐ๋“ญ๋‚˜ +๋ณธ์—ฐ +ํŒ”๋ ˆ์Šคํƒ€์ธ +๊ณต๊ณผ +์•ˆํ‹ฐ +์•„๋ฌด๋ ‡ +์—˜๋ฆฌ์ž +ํ‰๋‚ด +ํ”„๋ž‘ํฌ +๋ช…ํ•จ +์œ ๋ฌด +##ํ—ˆ๊ฐ€ +์ƒ์‚ฐ์ž +์ž‘์‚ฌ +๊ตฌ์Šฌ +๋งค์ง +1955 +ํ† ์ข… +๊ผผ์ง +์ฆ์ž +1800 +์ด๋ธŒ +๋ง‰์—ฐ +๊ณจ๊ณ ๋ฃจ +์นด๋ฆฌ์Šค +์ผ๋ฆฌ +์พŒ๋ฝ +##๋‚ด์…”๋„ +##๋ž€ํŠธ +์†์ดˆ +ํ•œ์˜ฅ +์Šคํƒœํ”„ +ํƒˆ๋ถ์ž +๊ด€ํ†ต +๋‹ค๋…€์˜ค +์ถœ๋ฐœ์  +๊ท ํ˜•๋ฐœ์ „ +ํ•ญ์ผ +๋‹น๊ตญ์ž +๋‚˜๋‚  +ํ›”์ณ +161 +์ธํ„ฐ๋‚ด์…”๋„ +๋†€์ดํ„ฐ +๊ต์ง์› +๋ณด๋ฅ˜ +์ž„์ข… +164 +์ž์ƒ +์‹ฑํฌ +๋ฏธ๋ฌ˜ +์ƒ๋‹จ +๊ฒฐํ• +ํŽผ์นœ +๋ฒˆ๊ฐˆ +์‹œ์ฑ… +ํ‹ˆ์ƒˆ +๊ฑด์ถ•๊ฐ€ +##๋‚˜๋“œ +ํ„ฐ๋œจ๋ฆฌ +๋“ค์–ด์„ค +์ˆ˜์—†์ด +๊ฐ•์„ฑ +๋Œ€์ฑ…์œ„ +์•Œ๋œฐ +๋‹ค์šด๋กœ๋“œ +๋ˆ„์ด +##๋ฐ”๊ฟˆ +๋ถ์„œ +๋…ธ๋™๋ ฅ +Wh +๋งจ์ฒด์Šคํ„ฐ +์ €์ชฝ +##TS +##๊ทธ๋ ˆ +๊ตฐ์ค‘ +๋Š˜๋ฆด +์ฝ”์˜ค๋กฑ +ํ…Œ๋งˆํŒŒํฌ +๊ณ ๋ชจ +๋ณด๊ฑด์†Œ +์œ ๋Šฅ +์ค‘์› +##OR +์„ธ์ผ์ฆˆ +1919 +์ตœ์žฅ +๋ชฉ์žฌ +ํ•œ๋‹ค๋ฉฐ +๊ฐ€๊ธ‰์  +##ris +์ฒœ์ฒด +ํœ˜๋ฐœ์œ  +์‹ ์˜ +151 +ํŒจ๊ถŒ +๊ฐํ–‰ +##ํ˜„์‹ค +์šฉ์˜์ž +์›์„œ +์ œ๋Œ€ +##์ฃผํƒ +๊ฐ„๋‹ค๋Š” +๋Œ€์šฉ +๋ฏธ์‹œ +๋Š˜์–ด๋†“ +์œค๊ณฝ +๊ฒฐ์—ฌ +๋ฐฐ์—ด +๋น„๊ด€ +๋†์ˆ˜ +๊ฒฐ์„  +๋ฐ”๋Š˜ +์ •๋ก€ +๋ณด๋‹จ +##์น˜์•„ +์นดํƒ€๋ฅด +##์งˆ๋Ÿฌ +์ •๋ฅ˜์žฅ +๋‹ดํ™” +๊ธ‰์†ํžˆ +์‹ค์„ธ +์ถ”์ข… +์‹์ด +๊ตฌ๋น„ +์‹๊ฐ +๊ฒฝํ–ฅ์‹ ๋ฌธ +168 +์ •์‚ฌ +ํ˜๋Ÿฌ๊ฐ€ +์‹œ์œ„๋Œ€ +์„ฑ์ „ +๊ฑด์„ค์—… +์ ˆ๋ฐ• +์ฒด๋‚ฉ +์ฑŒ๋ฆฐ +๋‹จํ’ +์„ฑ๋ชจ +์—ฌ๋ ฅ +๋งค์Šค +์™ธ๋ฌด +##์ฒ˜๋ถ„ +์ปดํผ๋‹ˆ +๋“ค์œผ +์ €ํƒ +์ด์˜ +๋ฐ˜๋‚ฉ +ํ˜๋ ธ +์‹œ๋ฐœ +๋ง์น˜ +์˜ค์‚ฐ +๋ฐฉ๋Œ€ +ํŒŒ๋ฅด +์ž…๊ธˆ +๊ณต์ฑ„ +์ถ”๊ธฐ๊ฒฝ +๊ณ ์•ก +๊ธฐ์ฒด +์žฅ์• ๋ฌผ +์ฃผ๋‹น +๋”ฐ๋ฆ„ +๊ตฌ์ธ +ํญ๋ฝ +์“ฐ์˜€ +ac +์กด์† +##ife +๊ณ ๋กœ +##์‹œํ”ผ +๋ฉธ์ข… +##์‹ ์ € +์‹œ๋ฎฌ +์Šคํ‹ฐ๋ธŒ +์—ฐ๊ตฌํšŒ +์ •์ œ +๊น€๋ฐฅ +๋™๋ฐ˜์ž +##๊ฐ€์น˜ +##์ •๋™ +๋‘ํ”ผ +##ํ€˜์–ด +##์šด์šฉ +##์ฃผ๊ตฐ +์•ต์ปค +์ฝœ๋กฌ +##์ˆ˜์ง€ +์˜์•„ +์•„๋ฉ”๋ฆฌ์นธ +ํ•˜๋“œ์›จ์–ด +##๋กœ์›Œ +๋ถˆํ™” +์ฐฌ์–‘ +๋””์ฆˆ๋‹ˆ +๋น„์ฐธ +์นœํ•œ +์‹ ์ฃผ +๋”์šฑ๋” +1954 +##ํด๋ฆฌ +##und +##ํ•˜๋งˆ +##ph +152 +์ด์ƒ์‚ฐ +๋น„์—” +์œ ๋ฐฐ +๋ฐฐ์›€ +๋‹ด์žฅ +์ „๋ฌธ์˜ +์šธํƒ€๋ฆฌ +No +๋Š๋ฆฌ +ํƒํ•˜ +์ž์„  +์ด๋ฃจ์–ด์งˆ +๋งŒ๋‚œ๋‹ค +์ œ์กฐ์‚ฌ +๋น„์›ƒ +๋ ˆ๋ชฌ +์ˆœ์ฐฐ +์ˆ˜์—ผ +๋Œ์–ด๋‚ด +๋†๋‹จ +์Šฌํผ +์นผ๋กœ๋ฆฌ +๋ฌด๊ณต +##RI +๋ฐฐ๋ถ€ +๋ฉ”์นด +ํŽ˜๋ฅด์‹œ์•„ +kt +์ฝ”๋ผ๋ฆฌ +๋๋‚ฌ +์–ผ๋ฃฉ +์†์งˆ +์‡ ํ‡ด +์Œ์•…๊ฐ€ +์›€์ง์˜€ +์ ‘๋Œ€ +##ome +##ive +์—ฌํ–‰๊ฐ +๋งˆ๋…€ +๋งˆ์Œ๋จน +๋ชฉ์ ์ง€ +์ž์ด์–ธ +์œ ํฌ +##์งœ๊ธฐ +์ธ์„  +๊ถŒ์ด +##ํŒŒ๋ฆฌ +๊ณต๋ฒ• +##๋ฆฌํ•™ +์ผ์–ด๋‚œ๋‹ค +##๊ฐˆ๋น„ +๋Œ€๋ฆฌ์ธ +๋ณ€ํ•œ +๊ธฐ์ˆ˜ +์ข…์ž +๋ฐœ์‚ฐ +๋Œ€์Šน +๋ ˆ์Šค +๊ณค๋‘ +##๊นจ๋น„ +๊ฐœ๊ทธ๋งจ +์œˆ๋„์šฐ +ํƒ€๋ฝ +ํ”ผํฌ +์ฟ ์…˜ +๊น€์› +์„ธ๋ผ +ํ‹ฐ๋ฒ ํŠธ +ํŠน๊ธฐ +๋ถ€์‘ +176 +ํ™€๋”ฉ +์กฐ์—… +ํ˜ธ์นญ +์น˜๋งˆ +์ทจ์žฌ์ง„ +BC +163 +์ˆ˜๋„์› +ํ™˜๊ธ‰ +์˜ํ•˜ +ํ•œ๊ตญ์ „๋ ฅ +์—ฐ์ค‘ +์ „๋ผ๋„ +์ฐธ์น˜ +์“ธ๋ชจ +์กฐ๊ฐœ +๋ชจ์…” +์ผ์„ธ +์ถ”ํ›„ +๋Œํ’ +๋ถ€๋„๋Ÿฝ +๋ฏธ์ƒ๋ฌผ +128 +์•ผ์Šค +์ดˆ์กฐ +139 +๊ธ‰๊ธฐ์•ผ +์ œ์™• +##์ด์•ก +๊ฒฌ๊ณ  +ํ•œ์„ฑ +๋ฆฌ์˜ค +์—๋”” +##ati +##DS +1957 +์ƒ๊ฑฐ๋ž˜ +์ฐพ์•„๊ฐ” +๊นŠ์ˆ™์ด +์‚ฌ๋ฒ•๋ถ€ +๋“ ๊ฐ€ +์ˆ˜์‹ +ํ—ˆ๋ฒ… +์งˆ์˜ +์ƒ๋ถ€ +๊ฑฐ์ง€ +๊ฐœ์‹ ๊ต +๊ฐ๊ฒฉ +๊ณต๋ช… +์•„์ฟ  +๋งค์‚ฌ +์นด๋ฆฌ์Šค๋งˆ +์นœ์ˆ™ +์‹œ๊ฐ€์ด์•ก +ํŒŒ์šธ +์นด์นด์˜คํ†ก +์ˆจ๊ฒจ์ง„ +ํ•จ๋Ÿ‰ +๋จธ์‹  +์ถฉ์ฒญ๋‚จ๋„ +์ถ”๋Œ€ +์ˆ™์ด +๋ฒฝ๋Œ +##์‹œ์ŠคํŠธ +##๋‰ด์–ผ +๋ฌผ๋ฆฌ์น˜ +์„œ๊ท€ +์™ธ๊ณ  +๋ฒ ์ŠคํŠธ์…€๋Ÿฌ +์นœ๋ถ„ +ํ•ด์ค€๋‹ค +๋‚ด๋ฒ„๋ ค +##์ž์œ ๊ตฌ์—ญ +ํƒœ์•ˆ +##๋ฒ ๋ฆฌ์•„ +์˜ฌ๋ผ์˜จ +##able +##ier +์›์•ˆ +##์ˆ˜์› +##๋“œ๋ฆ„ +์ •์  +##๋“ค์ธ +์“ธ๋ฐ +์—„์ค‘ +##tr +์˜ค๋ฅธ์† +๊ฑฐ์‹œ +ํ™œ๋ฐœํžˆ +ํ˜ธ์˜ +์‚ฌ์œก +๊ฐœ๊ต +๋ฉด์—ญ๋ ฅ +๋ฌต๋ฌตํžˆ +๋ฐ”์ด์˜ฌ๋ฆฐ +ํ”ผ๋ ฅ +์‹ ์ง„ +๊ฒฝ์ œ์ž์œ ๊ตฌ์—ญ +๋น„์•„ +์–ด๋Š๋ง +์™•์„ฑ +์š”๋ž€ +์‚ฌํ‘œ +๊น€ํ˜œ +๋– ๋Œ +์†์‚ญ +ํƒˆ๋ฐ”๊ฟˆ +์Šฌ๋ผ์ด +๊ณ„์…จ +๋ด‰๊ฑด +๊ณตํ‘œ +์ˆœ์ฐจ +ํ•œ์ƒ +๊ณ ๋ นํ™” +์ด์žฌ์šฉ +์‚ฌ์ ˆ 
+ํŽผ์นœ๋‹ค +##๋‹ˆ์Šคํƒ„ +์ž๋‹ˆ +ํ•˜๋ฃจํ•˜๋ฃจ +์ด์Šน์—ฝ +##ystem +์‚ฌ์น˜ +๊ธฐ์šธ์˜€ +์ชผ๊ฐœ +๋‹จ๊ณผ +์กฐ์˜ +์˜ˆ๋ณด +์˜ค๋””์˜ค +๊ทธ๋†ˆ +์ˆ˜์ถœ์ž… +๋„์ • +ํ‹€๋ฆฐ +์—ฟ๋ณผ +๋†์•ฝ +##ํ”„์Šค +ํฉ์–ด์ ธ +์†ํ†ฑ +์„œ๋‚จ +๊ฐœ๊ตฌ๋ฆฌ +๋‹ค์ผ€ +๋งž๋ฌผ๋ ค +์ƒŒ๋“œ์œ„์น˜ +์—ฌ์ˆ˜์‹œ +์–ผ์–ด๋ถ™ +์˜ˆ๋…„ +์• ์“ฐ +113 +##ath +์„ธ์šธ +##๋œจ๋ ค +๋‹น๋ฉด +##๋„ค๋งˆ +LPGA +์ดˆ๋ฐฅ +๋ฐ˜๊ฒฝ +์Šฌํ”„ +##์–ด๋ƒˆ +##์–ด์•ผ +##๊ณต๋‹จ +1600 +๋กœ์—ด +๋ฆฌ๋น„์•„ +ํƒ€๊นƒ +PS +๋ถ€๊ฒฐ +์šฉ๊ฐ +์›๋งŒ +ํ‹€๋ฆผ์—†์ด +์–ด๋‘ก +๊ธ‰์ง„ +en +์€์€ +๊ฒฐ๋ง +์ข…ํŽธ +##์ฝ”์Šค +๊ฐ„์ฒฉ +ํ–ˆ์ž–์•„ +์œ ์„œ +EC +ํ˜ธํก๊ธฐ +์„ ํŒ” +๋“ฑ์žฅ์ธ๋ฌผ +##์‚ด๋ ˜ +AC +๋ฌผ์ž +๊ฐฑ์‹  +์ด‰๋ฐœ +๊ต๋‹จ +๋“œ๋ฆด๊ฒŒ์š” +์ฝ”๋”” +##๋ฌธํ™”์žฌ +์ž์† +์•ฝํƒˆ +##๋ฃจ์‚ด๋ ˜ +๋์œผ๋‚˜ +๋Œ€์šฐ๊ฑด์„ค +์œ ๊ฐ€์ฆ๊ถŒ +##๋‹ค๋…” +##์•…์‚ฐ +153 +ํ™˜์‚ฐ +์‹œ์ข… +์ฐฝ์˜๋ ฅ +์žฃ๋Œ€ +์ž‡๋”ฐ๋ฅด +์Šค์บ”๋“ค +์œ ์—ญ +์ฐŒํ‘ธ +์–ด์ฉ +๋Œ€๋‚˜๋ฌด +์„ธ์ž…์ž +##les +์ ‘ํ•  +์„ฑ์ง์ž +์žฅ์‹œ๊ฐ„ +##๋งˆ์ผ“ +๋‚จ์ธก +Or +๋ฌด๋ช… +์—๋“œ์›Œ๋“œ +๋ถ๊ฒฝ +์Œํ–ฅ +##๋ผ์ด๋‚˜ +ํž˜์“ฐ +Comm +์„œ๊ท€ํฌ +์ฒญ๋Ÿ‰ +๋’ค๋ฎ +##๋ฌด๋Šฌ +121 +##โ”โ”โ”โ” +๋ฐ•๊ฒฝ +##์„ธ์ž +์Šคํ”ผ์ปค +์น˜๋ฃŒ๋ฒ• +์‹œ์–ด๋จธ๋‹ˆ +๋‹ค๋ฐฉ +๋‹ค๊ตญ์  +๊ณ ํ•จ +๋งˆ๊ทธ +์•ˆ๋„ +๋ฐ€๊ฐ€๋ฃจ +ํ‘œ์ƒ +๋‚ด๋ ค์™€ +์šด์ž„ +๋ ˆ๋‹Œ +##ations +ํ„ฐ์กŒ +๊ณ ํ’ˆ +๋ถ€๋‹ด๊ธˆ +๋งˆ๋‹ˆ์•„ +EP +์ˆ˜์šฉ์ž +์†Œ์–‘ +##์•„๋‚˜ +์ˆ˜ํ‘œ +Intern +์š”์ • +ํ™”์‚ฌ +๋ถ€์œ„์›์žฅ +๋ฆฌ์น˜ +๋ฐ˜๊ฐ +์•ˆ๋ฉด +์—ฌํƒœ +GB +์›…์ง„ +๋“œ๋ผ์ด๋ฒ„ +์™ธ๋ž˜ +์•…์šฉ +๊ฒฝ๋งˆ +์ €ํ•ด +##์‚ฌํ‚ค +์ž…์ƒ +์ƒํšŒ +์†Œ์ง„ +๋ฉ”์ด์ปค +ํ† ๋งˆ์Šค +ํŽผ์ณ์ง„ +๊ณต์ง€ +์Šคํฐ์„œ +์•ฝ๊ด€ +ํƒ์ž +์•„์ด๋”” +ํ•ด๋ณ‘๋Œ€ +##์—ฐ๋งน +์ฆ์•ก +์ฑ…์ž„๊ฐ +##AP +##๋กœ๋”” +์‹ธ์›  +๊ณ„๋ฐœ +##์‚ฌ์ „ +์žŠํ˜€ +๋ชจ์Œ +DS +๋ฎŒํ—จ +๊นจ๋—์ด +me +1952 +์„ธ์›Œ์ง„ +์ œ๊ฐˆ +๊ตฌ๋งŒ +๋‘๋ ต +ํฌ์ŠคํŒ… +BBC +##ํฌํ„ฐ +์ˆ˜๋…€ +์‹ฌํ˜ˆ +br +ํ•ญ์†Œ์‹ฌ +127 +๋ชจ๊ธฐ +##MS +์šด์ˆ˜ +๋ฉ”ํŠธ๋กœ +์Ÿ์•„์กŒ +๋‹จ๊ตฐ +์†์˜ท +์žญ์Šจ +##me +ํ™ฉํƒœ +##of +์ค‘์žฅ +๊นจ์šฐ +์ง€ํœ˜๊ด€ +๊ทผ๊ฐ„ +์ƒ๊ฒจ๋‚ฌ +ํ˜๋ ค +ํƒ€๊ณ ๋‚œ +๋“ค๋ ค์ฃผ +##๋ผ์ดํŠธ +์กด์žฌ๊ฐ +๋Œ๋ ค์ฃผ +์–‘์งˆ +์•„์šฐ๋ฅด +์•ˆ๋ชฉ +ํ•œ๋ฐœ +๊ฐ•์ธ +์ง„์—ด +ํŒŒ์ดํ”„ +##sp +๋Š”์ปค๋…• +์–‘๋Œ€ +144 +๋ถ€๋‘ +์žฅ๋‹ด +์นด์Šค +##ollow +์–ด๊ธ‹๋‚˜ +๋น„์ฃผ์–ผ +ํŒ๋ฌธ +์žฌ์ฃผ +117 +์ƒ๋ด‰ +ํ–‰์  +๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค +๊ธฐ์ผ +์ด๊ธฐ +๋ฐ”๋ฆฌ +##ite +๊ฑฐ๋Š๋ฆฌ +๊ฒฝํฌ๋Œ€ +ํ˜•๋ฒ• +##๊ฒŒ์ž„ +๋–จ์–ด๋œจ๋ฆฌ +์ด๋‚˜๋งˆ +์ฒ™๋„ +๋งค๋‰ด์–ผ +์™ธ๊ต๊ด€ +์œ ์žฌ +ํ”ผ๋“œ +๊ธฐ์นจ +๋“ค์–ด์™€์„œ +ํŒŒํ—ค +์ •์ค‘ +์ฃผ๊ณต +์˜์‚ฐ +Comp +๋ฐ”๋น  +๋งค์ฝค +ํ™€๋”ฉ์Šค +์žฅ์› +์ด์†ก +๋Œ์•„์„œ +๋ถˆ์ถœ +ํ‹€๋ฆฌ +์ˆ˜์ž์› +##ํ„ดํŠธ +๋ผ์›Œ +##ํ† ์Šค +์•ก์„ธ์„œ๋ฆฌ +ํ˜„๋ฏธ +๋ด๋ผ +##๋Ÿฌ๊ธฐ +์ฒด์ „ +์œ„ํ—Œ +์ถ”๋ก  +์ ์ž– +๋ง๋ถ™์—ฌ +##anc +ํƒํ–ˆ +์˜ˆ๋ฃจ์‚ด๋ ˜ +๋ง๊ฐ€ +๊ฐ•๊ฐ„ +์ž์‚ฐ์šด์šฉ +ํŽ˜๋ฆฌ +์ง‘ํ–‰๋ถ€ +์•„๋ฆฌ์Šค +์—ฐํƒ„ +ha +๋„์–ด +##๊ตฌ๋ฉ +์„ ์ฐฉ์ˆœ +๊ฐ‘๋‹ˆ๋‹ค +๋ชธ์ง“ +์‰ฌ์›Œ +##ition +##๋‹ต๋‹ˆ๋‹ค +ํ•ฉ์‚ฐ +๋˜‘๋ฐ”๋กœ +์ €์ถœ์‚ฐ +์˜ค๋ฆฌ์ง€ +๋”๋” +์…ฐํ”„ +ํ•˜์–— +๋ฃจํ„ฐ +์ง€์ฃผํšŒ์‚ฌ +๋ฒˆ์‹ +๊ฐ€๋ถ€ +ํ•ด๋… +์ž์ฑ…์  +์žฅ์•ˆ +์ธํ”Œ +ํก์‚ฌ +๋ณดํ˜ธ๋ฒ• +ํ™ฉํ›„ +์ฆ‰์„ +ํŒŒ์† +๋น„๋ˆ„ +๋ง๋ฏธ์•” +์นœ๋ฐ€ +##๊ธ€๋ผ์Šค +๋“œ๋ ค +๋…น์ง€ +๋ฐ˜๋ก  +์ตœ์˜ +ํ•œ์€ +Co +๋‹ฌ์•„๋‚˜ +์—ˆ์—ˆ +AD +๋ณด์ด์Šค +๋ถ๋™ +์ฒด์˜จ +์ƒ์‘ +Pol +์ง€ํƒฑ +๋‹ค์ค‘ +๋Œ€๊พธ +์„ธ๋ฆฌ +๋ชฐ๋ ธ +์Šคํ†ค +์นด๋ ˆ +์—Ž๋“œ +๋ฏธํ˜ผ +์“ฐ์ธ +์กด์—„ +๋ƒ‰๊ฐ +๋ณ€๋ชจ +##ํ‚ค์Šค +tvN +์ˆ˜๋ฐ• +##์ฟ ์Šค +์„ฑ์ง€ +๋‘๋“œ๋ฆฌ +149 +ํ†ต์‚ฌ +์—ญ์  +์—ฌ๊ด€ +๊ด€์ฒ  +๋˜์ƒˆ +์ง์ฑ… +์•„์—ฐ +ํ„ฐ์ง„ +ํ† ๋‹ˆ +์ €๋ ‡ +๋ฐ•์ฅ +๋ฐ˜ํ•ญ +์ „์‹œ์žฅ +๋™๋ฐ˜์„ฑ +๋‚จ๊ฒฝ +ํ˜ธ์Šค +์—ฌํ–‰์ž +๊ด€๋ จ์ž +ํ—ˆ๋ฒ…์ง€ +๊ณต์ฒญํšŒ +์ข…๊ฐ€ +๊ฐฏ๋ฒŒ +๋งˆ์ง„ +##AE +๊ธ‰๋ณ€ +๋นŒ๋ผ +๋‹น๊ทผ +์–ด๋ ค์„œ +์…€์นด +ํŒจ์ŠคํŠธ +##์ „์ง€ +ํŠธ๋ฆฌํ”Œ +์–ด์šธ๋ ค +์˜ˆ๊ฒฌ +##๋…ธ์ด +๋Œ€๋ฒ•์›์žฅ 
+##int +##์„ธ๊ธฐ +##๋ฉ”๋ฆฌ์นด +์ ˆ๋กœ +๋ฏธ์„ฑ +์‹ ์ž‘ +๋น„๋งŒ +๊พ€ํ•˜ +ํŠธ๋Ÿฌ +์ƒ์ด +NG +์„ฑ๋ฒ”์ฃ„ +๊ณต๋ฌด +์ฃผ์–ด์ง„๋‹ค +ํ•ฉ์‹œ๋‹ค +์•Œ๋ ˆ๋ฅด๊ธฐ +๋“คํŒ +##์ฐจ๋ฆผ +๊ตญ์˜ +##์‹œํ‹ฐ +์‚ฌ์šฐ๋”” +๋“ฌ๋ฟ +##์ˆญ์•„ +141 +World +ํฌ์šฉ +##์–ธ์Šค +๊ฒ€์—ด +๋‘ฅ๊ทผ +์ž๋ฏผ +์„ผํŠธ๋Ÿด +์ˆ™์ข… +๋ฐœ๋งค๋˜ +##ํ•™๊ทœ +์ƒ‰์†Œ +##ํˆฌ์ž +์„œ๋„ˆ +์ฑ„๊ถŒ์ž +์„ ๋ณด์—ฌ +##์ง€๋„ +ํ—ค์•„ +์ œ๊ฐ +##๋‹ค๋‚˜ +##ํ•˜๋Š˜ +์•…์˜ํ–ฅ +๋‚จ์„œ +์ถ•์‚ฌ +์—ฌ๋Ÿฟ +์ด์–‘ +์ฃฝ์ธ +ํ˜„์ˆ˜๋ง‰ +##๋ฐ”ํƒ• +์‚ฌ์ง€ +์Šคํ†ฑ +์„œ๋‘๋ฅด +์›ฐ๋น™ +๋กœ๋งจํ‹ฑ +๋ฌด์‹  +์„ฑ๋ฒฝ +์‹ซ์–ด +##๊ธฐ๋ฆ„ +์ถฉ์ฃผ +์ฑ„๋ฌด์ž +์น˜์ผœ +์„œ๊ฐ• +์ง€์ค‘ํ•ด +์ž‘๋ฌผ +์ž๋ž +๋„๋ณด +์—ผ์†Œ +๊ธฐ์ข… +##์Šฌ๋ฆผ +์ •์žฌ +์ฃ„์ธ +์˜คํ‚ค๋‚˜์™€ +๊ณต์ˆ˜ +์ด์ด +๋ฒŒ๋ฆฌ +๋ณด์„ฑ +์ด์ฆˆ +1300 +๋“œ๋Ÿผ +์†Œํ™€ํžˆ +##๋‚˜๊ฐ€ +๋‹น์ง„ +์ˆ™๋ช… +๋ฐฉ์†ก์ธ +##ank +ํ•˜๋ฃป +๋ฌ˜ํ•œ +๊ธฐ์ƒ‰ +์ž„์ฐจ์ธ +118 +์ง•ํ›„ +์ƒ์†Œ +wh +๋‚˜์—ด +์„ฑ๊ธ‰ +๊ทธ์ œ์•ผ +์ง€๋… +ํ•„์‚ฌ +##ํŠฟ๋‚  +์ผ์‹ +์ž์ˆ˜ +1939 +##์ผ๋“œ +172 +ํ›„์›๊ธˆ +์‹ฌ๋ถ€ +##์ €๊ฒƒ +๋งน์„ธ +๋งŒ๋ฝ +์˜๋…ผ +๊ด‘๊ต +๋‹ค๋…€์˜จ +์ž”๋ฅ˜ +์ด๊ฒƒ์ €๊ฒƒ +์€๋ฐ€ +122 +##ํฌ๋ฆผ +์‹œํ‚จ๋‹ค๋Š” +๋ฟŒ๋“ฏ +์“ฐ๋Ÿฌ์ง€ +์ผ์‘ค +๊ฐ•๋ถ +์ด์–ด๋ฐ› +๋ฏผ๊ตญ +ํ†ต์‹ ๋ง +##ํ‹ฐ์ง€ +์‘ํ•˜ +์ผ์‚ผ +์ดํŠฟ๋‚  +๋Œ€๋• +๊ฑฐ๋ฏธ +1941 +์„ธ๋ฐ€ +๊ด‘ํ•ด +์—ฌ๊ฐ„ +์ธํ”Œ๋ ˆ์ด์…˜ +์ฐจ์ฒด +##ํ‰๊ท  +๋ชจ๋ฆ…๋‹ˆ๋‹ค +์˜์‚ฌ +๊ด‘์ €์šฐ +๋ฐํž +JT +##๋‚จ๋™ +๊ทธ๋ž˜ํ”„ +๋งˆ๋•…ํžˆ +์™ธ๊ณ„ +##์‹œ์ฆ˜ +๋ถˆ์‚ฌ +์œ„ํƒœ +๋‚ด๊ธฐ +##๊ธฐ๋ถ€ +Ac +๋‹ฌ๋ ค๋“ค +##๋ฆฐ๋‹ค๋Š” +ํšŒ์‚ฌ์ฑ„ +๋ฌด์Šฌ๋ฆผ +๋ฌผ๋Ÿฌ๋‚ฌ +์Šค๋ฆด +ํ”Œ๋กœ๋ฆฌ +๊ถŒ๋‹จ +๋น„ํ‰๊ฐ€ +์†Œ๋™ +๋™๋‚จ์•„์‹œ์•„ +๋ฒ”์ฃ„์ž +๊ณ„์ˆ˜ +์ทจ์—…์ž +์„œ์šด +์‹คํฌ +ํด๋ Œ์ง• +๋ณ€๋ก  +์ฐŒ๋ฅด +๋ณด๋‚ธ๋‹ค +๋„๋ผ +##ํŠธ๋ž™ +์ฒœ๋ฌธํ•™ +์•…์ˆ˜ +##์–ด๋งˆ +์†์ด +์กฐ์ˆ˜ +์šฐ๋™ +์˜จ์ „ํžˆ +##ํฌ์ • +##erv +๋Œ์–ด์•ˆ +๋ชจ๋‹ +์œ ๋ณ‘ +171 +๋”์šด +์—ผ์› +##ax +AM +๊ตฌํ˜• +๊ฑฐ์ณ์•ผ +๋Œ€์˜์› +##๊ฑฐ๋ž˜์œ„ +136 +๊ณต์ •๊ฑฐ๋ž˜์œ„ +๋ฌถ์—ฌ +ํ‚ค์šด +Re +์ €์งˆ๋ € +๋ณ€ํ˜ +๋ฐ”์šฐ +์ „๋‹จ +์ˆ˜๊ตฐ +##์ˆœํ™˜ +์šฐ์ฒด๊ตญ +์กฐ์ฒ˜ +ํ•œ์–‘๋Œ€ +์†กํŒŒ๊ตฌ +๋ฐ•์ฃผ์˜ +์กฐ์ข…์‚ฌ +์ฒผ์‹œ +์กฐ๋กฑ +์–‘์ˆ˜ +ํƒœ์›Œ +๊ตญ์œ  +##ํŒฉํŠธ +๊ฐœ๋ง‰์‹ +์ฝ”์นญ +์ ํ +์ผ์ฐจ +์•ˆ๋ณด๋ฆฌ +ํšŒ๋ณต์„ธ +ํ•˜ํ•„ +๋„๋ฃจ +##์›Œํ–ˆ +๊ทนํ•œ +์ž๋ฆฟ +ํ† ํ†  +์šฐํŒŒ +๊ฑฐ์ณค +์žฌ์‹ฌ +ํก์ž… +๊ฒฌ๋ŽŒ +์‹ ๊ณก +์•ž์ชฝ +##ID +์‚ฌ์ฑ„ +์ด์ œ์•ผ +๋ฆฌํ„ฐ +##๋‚˜๋ฅด +์ˆ˜๋ ˆ +๋ถˆ์–ด +์šฐ์™€ +๋‹ค๋‹ˆ์—˜ +166 +๊ดด๋กญํžˆ +๋ฐ˜์žฅ +##๋ฌด์†Œ +์ฒด๋‚ด +146 +๋””์•„ +๋ฏธ์นœ๋‹ค +์ด์€ +##IN +๋Œ€๋งˆ +๊ถŒ์˜ +๊ท€๋† +##con +ํ›„์ฟ ์‹œ๋งˆ +Soc +##์นด์™€ +๋‹จ๋‹จํžˆ +ํ•ด๋‚ผ +๊ฐ€์ฒ˜๋ถ„ +์‚ฐ์ˆ˜ +ํŒ๋ฌธ์  +์–˜๊ธธ +##๋ฆฌ์— +๊ฑด๋„ธ +๋ง›๋ณผ +์ง€๋…” +##์›์ง€ +๋ฆฌ์ฝœ +๋ฉ”์‹ ์ € +๋™ํ•™ +SF +##๊ตฌ์น˜ +##ash +์†Œ์‹ํ†ต +๋ฒŒ์–ด์งˆ +230 +์ค„์ง€ +๋„“์ด +##๋‚ด๊ธฐ +๊ทผ์‹ฌ +๋ณต์„  +ํ”Œ๋ผํ†ค +๋Œ๋ด„ +์„œ๋น„์Šค์—… +๊ฒฌ์ง€ +๋ฒŒ์ผ +์–ด๋งˆ์–ด๋งˆ +์ฐฉ์ทจ +##๊ณ ๋ž˜ +##์‚ฌ์ƒ +์ดํ˜ธ +ํŒŒ๊ณ ๋“ค +๊ฑฐ๋‘ฌ +๋ฐฉ์‚ฌ๋Šฅ +์šธ๋ ธ +De +๊ฒฝ์ƒ‰ +์ˆจ์ง€ +๊ฐ€๋‹ฅ +๊ณต์šฉ +๋ฎค์ง€์…˜ +์ ์ง„ +์†ก์ „ +๊ณ ์šด +์›์‚ฐ์ง€ +๋‹ค์ด์–ดํ„ฐ +์žฌ๋ฌผ +์Šคํ€˜์–ด +##ase +์›ํ•œ +์กด์Šค +##ree +๋‚ฑ๋ง +์ธ์ถœ +##ํ‰์ด +๋ฌธํ„ฑ +์ƒ์Šต +์œ ํ™” +137 +##๋ณด๋‚ด +๋ฒ„๋ธ” +##๊ธฐ์‚ฌ +์‹ฌ์•ผ +์†์ต +ํšจ์ž +๋ธ”๋ผ๋”” +ํ™”์š”์ผ +์„ฑํฌ +๋ชจ์„ธ +##๋ฆฌํžˆ +์˜ค๋ผ +##์Šนํ™˜ +์„œ์‚ฐ +์ค˜์š” +์ž„๋ฐ• +##che +์ „์ „ +๋ถˆ๋Ÿฌ์ผ์œผํ‚ค +๋ฒ ๋“œ +MP +๊ตฌ๋ถ€ +##iti +๋ฌผ๋ ค๋ฐ› +์ˆ˜๋งŒ +๋ชฐ๋ ค๋“ค +๊ฐ€๋ฝ +์ด์  +##๊ต๋„ +์ž…์†Œ๋ฌธ +##๋“ค์˜€ +์ค‘์„ฑ +ํŒ์„œ +๊ทธ๋ž‘ +๊ฐ๋ฆฌ +๊ฐ•ํ–ˆ +๊ตฌ๋™ +์ฒ˜ํ•˜ +์œ„์ชฝ +์ถฉ์‹คํžˆ +๋Œ€๊ด€ +์šฉ๋ˆ +์ž…์•ˆ +์˜จ์ „ +๊ธฐ๊ด€์žฅ +์„œ๊ฒฝ +์Šคํƒˆ๋ฆฐ +ํ˜ธ๊ฐ€ +๋ชจ๋กœ +์ƒ์ฒด +##์—ฌ๋Œ€ +์•ˆํ†  +์กฐ์ด +ํฌ์ฒœ +ํ•˜๋งˆ +ํ‰์ƒ์‹œ +์ดˆ๋ผ +๊ทธ๋ฆฌ์›€ +์‚ฌ์šฐ๋””์•„ +์˜ค๋„ˆ +๋งค๊ฒฝ +ํ•˜๋ฃป๋ฐค +์›Œ์น˜ +ํ•™ํŒŒ +ํ•ฉ์ฐฝ๋‹จ +148 +์ฟ ํ‚ค +##์‚ฐ์„ฑ +PE +##๋ฆด๋ผ 
+##์ˆ˜์•„ +์ค‘์–ผ๊ฑฐ๋ ธ +##ํ•˜๋‹ˆ +์–‘์–‘ +ํ‰์ • +ar +์ƒ˜ํ”Œ +์‰ผํ„ฐ +์ผ๋ณด +ํ•œ๋‹จ +์ง€์ง€์ธต +์‚ฌ์—…๋‹จ +์ด๊ทœ +์ง€์ณ +์“ฐ๋Ÿฌ์ ธ +์ด๋ผ๋“ ์ง€ +์‚ฌ์ฃ„ +์—ฐ์ž„ +์ฐฌ์‚ฌ +์ž…์ฃผ์ž +์นผ์Š˜ +์น˜์ค‘ +์•„ํ”„๊ฐ€ +๋ณด์ƒ๊ธˆ +ํœ˜๋ง +##ound +๊ฐ€๋“  +##๋ฆฌ๊ทธ +##๋งˆ์„ +์žฅ์ฐจ +##๋ฌด๋‹ˆ +ํ˜ธ์œ„ +๋ถˆ๋ฌธ +๋‹จ๊ฒฐ +๊ฒฐ์˜์•ˆ +์•„์ด์–ธ +๋ฌด๋‚œ +##๊ธˆ์† +์•ˆ๋ฝ +๋…์„ฑ +์‚ฐํ™” +์—”์ง€๋‹ˆ์–ด๋ง +๋‚˜ํ˜ +๊ฐ–๊ฐ€์ง€ +1944 +๋ฉ์น˜ +ํŽ˜๋ฃจ +116 +์–‘๋ณต +์ง‘ํ–‰์œ ์˜ˆ +๊ธฐ๋ ฅ +์ƒ๋ฒ• +##up +์‚ฌ๊ฐ์ง€๋Œ€ +ํŽธํ–ฅ +๊น€์šฐ +๋ชจ๋…ธ +133 +๋ง์›๊ฒฝ +์ž‰์—ฌ +He +๋…ธ๋น„ +์ด๋ฏผ์ž +๋ผ๋งˆ +ํ†กํ†กํžˆ +ํฌ๋„์ฃผ +์—ด๋žŒ +์žฅ์• ์ธ +์„ ๋œป +๊ตฌ์ž +๋ถ™๋“ค +์ฒญ๊ฐ +์†Œ๋ž€ +๊ฑด๋„ˆ๊ฐ€ +๋™๋ž˜ +๋‚จ์–‘์ฃผ +ํ”ํ•˜ +๋ฒˆ๊ฐœ +์ •์ฐจ +์•„ํŒ  +์˜ฌ๋ฆฐ๋‹ค +๋ณผํŠธ +์นดํŠธ +๊ณ„ํš์„œ +167 +๊ฐ€๋Š˜ +์ œ์ž‘์ž +๋ชจ์ฒ˜๋Ÿผ +##์•”๋™ +132 +์ค‘๊ณ ์ฐจ +๋‘์–ด +##๋ฐฉ์‹ +๋งˆ๋ˆ„ +๋งˆ์˜ค +##์˜ฌ๋ผ +๋ฆฌ๋ผ๋Š” +ํ”Œ๋ž˜์‹œ +์ฒด์œกํšŒ +์‹ฌํ”Œ +๊ฐ์ธ +##์Šคํ‹ด +๋‚ด๋ ค์™” +OLED +์นจ์‹ค +์ฝ˜์Šคํƒ„ +์„ฑ์‚ฐ +๋Œ€๊ตญ๋ฏผ +๋‹ค๋ค„ +์•„ํ”„๊ฐ€๋‹ˆ์Šคํƒ„ +๋ฏธ๋ฅด +์ˆ ๋ถ€ +๋ฉ”ํƒˆ +๊ต๋ณด +์˜์˜ˆ +์ˆ™์„ฑ +๋ฌด๋„ˆ์ ธ +์–‘๋ฉด +์Šค์™€ +##๋งค๋„ +##are +๋ธŒ๋กœ๋“œ +##์€์ƒ‰ +๋ฐ”๊ฟ”์•ผ +๋‚จ์•„ํ”„๋ฆฌ์นด +์•„๋ž‘ +ํœ˜๋‘๋ฅด +ํ‰์ฐฝ๋™ +๋ฉ”์ด์ง€ +ํ—ค์–ด์ง€ +์‚ฌ์šฉ๋Ÿ‰ +๊ด‘๋ฌผ +์›ํšŒ +ํ’์„  +##enti +๋Œ์•„์„ฐ +##๋ฏธ๋””์–ด +๋ฌด์ž +##enc +##๋‹ค์ด +๋ฌด์ง„ +์—„์ • +์˜€์Œ +์žฌ์ด‰ +์ง€๋‚˜๊ฐ„ +์‚ฌ์šฉ์ž +##HD +์šฐ๋ฃจ +์‚ฌ์šฐ๋””์•„๋ผ๋น„์•„ +์ดํ™”์—ฌ๋Œ€ +์‹œ์—ฐ +์ŠคํŽ˜์ด์Šค +ํ† ์ฐฉ +##์žฅ๋ฉด +๋‹ค๋ฐœ +์˜ค๋ฆฌ์ง€๋„ +##๊ณผ์ด +์ž์ž‘ +##์ผ€์ธ +์šฐ์Šน์ž +์›์‹ฌ +๋ชปํ•œ๋‹ค๋Š” +์šฉ์ธ์‹œ +ํ™˜์ˆ˜ +์ž๋ฃจ +ํœด์ „ +์Šน๋ฅ  +์†ํ•™๊ทœ +ํ›„์„ธ +์•„๋ฌด๊ฐœ +๊ธ‰์ œ +๊ตฐ์ฒญ +๊ธฐ๋ปํ•˜ +์œ„๊ณ„ +๋ฒŒ์ธ๋‹ค +์•…๋ชฝ +1936 +์†Œ๋ณ€ +##ํ†ต์ผ +์œ ๊ณต์ž +๋ง์—†์ด +๋ฌด์‹ค +์œก์ˆ˜ +๋‹นํ˜น +์ˆ˜์ •์•ˆ +์†Œ๋… +ํ”„๋กœ์ดํŠธ +ํœด๊ฒŒ์†Œ +์ง์† +๋ถˆ๋ ค +๋‚ด์‹œ +174 +๋„์™” +##ํŒจ๋“œ +์„ฑ์žฅ๋ฅ  +๋…ธ์› +์•Œ๋ ‰์‚ฐ๋“œ +์ณ๋“ค +๋ฐฉํ™ฉ +์˜๊ตฌ +์ˆ˜์ปท +147 +์—ฐ์ • +๊ทธ๋Ÿฌ๋‹ค +์ƒ์„ค +๋ฐœ๋™ +์‚ฌ์šฐ์Šค +##ํฌ๋ผํ…Œ์Šค +์ž์ฒ˜ +์„ ํ˜ธ๋„ +##๊ธฐ์ˆ  +๋ฉ”์ปค +๋‹ค๋ฃฐ +์œ„์ด‰ +##์ง€์˜ค +1951 +์ˆ˜๋ฐฑ๋งŒ +๊ฑฐ์„ผ +๋ณดํ˜ธ์ž +ํœด์ง +๋ณด์„  +ํ•˜์šฐ +๋ ˆ์ด๋” +IBM +์šด์˜๋น„ +124 +๋…ธ๋ž€์ƒ‰ +ํ˜ˆ๋‹น +์ƒ๋ช…๋ ฅ +๊ฐ์งˆ +##๋Ÿฌ์›  +๋ฐฐ๋“œ +์—ฌ๋‹จ +๊น€์€ +์›€์ผœ +์ด์˜ +์งˆํƒ€ +๊นŒ๋งˆ +๋…ธ๋ ค๋ณด +##์•ผ์ˆ˜ +๋ฏธ๋ จ +์—ํ‹ฐ +138 +126 +์ถ”๊ถ +ํ„ธ์–ด +##์ˆจ์— +๋‹จ์ˆจ์— +TP +๋ฐ”์Šค +##๋ง์ด +๋งฅ๋„ +๊ผญ๋Œ€๊ธฐ +##์‚ผ์„ฑ +์•„๋ž˜์ชฝ +์ •ํ˜„ +๋‘๊บผ์šด +##๋ฐ๊ธฐ +๋ถ€ํ˜ธ +์ด๊ฐ• +๊ฑฐ๋“ค +##ae +์Šค์ณ +์ด๊ด€ +์ˆ˜์ต๊ธˆ +์‹ ์•ˆ +์กฐ๊ณ„ +์ผํšŒ +##๋…ธ์ด +์•”์ปท +1937 +๋ฏธ์ณ +์ฒญ๋™ +##meric +์”จ๋ฆ„ +ํƒ„์‚ฐ +์•„์ด์ฝ˜ +๋‹จ์ถ” +๋งค๋ชฐ +๋ณด์ง +์ดˆ์—ฐ +๋งž์ท„ +143 +์™ธ๋กœ์›€ +์œ ๋ฅ˜ +diet +๋™๋ฌผ์› +๋ฐ”๋‹ท๋ฌผ +์•ˆ๋ฌด +ํ•จ๋ฝ +##๋Š”๋ฐ +์••์ˆ˜ +์‚ฌํƒ• +๋ฌด๋ถ„ +๋‚ด๋ ค์•‰ +##ํƒ€์ž„ +๊น€๋‚จ +์ ๋ฒ• +##ron +๊บผ๋ฆฌ +๋ฒ„๋“œ +ํ™˜ํ•˜ +๋จน์—ฌ +๋ฉ”์ปค๋‹ˆ์ฆ˜ +##์ฟ ๋ฒ„ +๋ง‰๋ฐ”์ง€ +##ink +์ธ๋ถ„ +์‚ฌ์Šด +์ •์ฐฐ +๋„๋ง์น˜ +๊ต๋‚ด +๋‹ด๋ฐฑ +๋„์šฐ๋ฏธ +์—ฌํ–‰์ง€ +์žฌ์œ„ +##์Šน๋ถ€ +๋ชจ์กฐ๋ฆฌ +Col +๊นŒ์ง„ +ํŒ๋„ +์š”ํŠธ +##act +๋‚ด๋ชฐ +์ฐฉ๋ฅ™ +##๋ฒ„์„ฏ +๋…์ฃผ +์ง€์น˜ +์–ด์กฐ +์—ฌ๊ฒจ์ง„๋‹ค +GE +๋‚ด๋ฆฐ๋‹ค +ํŠน๋ณด +์žก์•„๋จน +๋ฆฌ๋ผ๊ณ  +๋”ฐ๋ฅธ๋‹ค +๋ณด์ž…๋‹ˆ๋‹ค +๊ฑฐ๋จธ +ํ›„์ฟ ์˜ค์นด +##์ž๋ฝ +์ฐจ๋ช… +NS +##itt +์ฐจ๋งˆ +์‹ค์—…์ž +์ˆ˜๋‚ฉ +๋”๋”์šฑ +##๊ฐ€์ง +์šธ์ง„ +์žฅ๋Œ€ +๊ฒฝ์ด +์ฒญํ•˜ +์Šคํ…Œ์ด์ง€ +##๋ฒ„๊ฑฐ +์ตํ˜€ +ํ† ๋ชฉ +##ake +์ตœ์ƒ์œ„ +์•ž์„ฐ +๋ฏธ๋ชจ +๊ด€์ ˆ์—ผ +ํ‡ด์žฅ +##ys +๊ทœ์œจ +์ง€๊ธˆ๊ป +as +์ˆ˜์„ฑ๊ตฌ +##์ฐฝ๊ธฐ +๋ผ์šด +ํฌ๊ฒฉ +##์žฅ๊ตฌ +๋ณผ๊นŒ์š” +์™ธ์šฐ +์ฒญ๋„ +์ถœํ˜ˆ +์š”์ง€ +ํŒ”๋ ค +๊ต๊ตฌ +๋งค๋‹ฌ๋ ค +๋ฐ๋ ค๊ฐ€ +์˜๊ตฌ์‹ฌ +์ด๊ฑดํฌ +์•„์ด๋Ÿฌ +๊ฐœ์ˆ˜ +๋“œ๋ž˜ํ”„ํŠธ +์„ ๊ตฌ +๊ฑด๋Œ€ +์ด๋ผ๋“ ๊ฐ€ +๋ฐœํ‘œํšŒ +๋ฒ•์ƒ +์‚ฌ์—ญ 
+##ner +##์‹ญ๋‹ˆ๊นŒ +๋™๊ฑฐ +ํƒˆํ”ผ +๊นจ์ง€ +์ตœ์ง„ +Gre +์ „์—ผ๋ณ‘ +ํ•˜ํ•˜ํ•˜ +์นœํ•ด +์ž…์ฃผ๋ฏผ +์„ฑ๊ธˆ +ํด๋ฆฌ์Šค +๋งˆ์นด +์•ฝ์‚ฌ +์ž๋ฆฟ์ˆ˜ +์†Œ์ƒ +ใ†๊ตฐ +##ํด๋ฆฐ +์„ธ์…˜ +##amp +์˜ฌ๋ผ์™€ +๋Œ์–ด๋“ค์ด +๋ฐฉํ™” +##์˜์ • +์—ฌ๋“œ๋ฆ„ +์•ˆ์ค‘ +์žฅ๋ณด +๋ชจํ‹ฐ๋ธŒ +##๊ฐ‘๋‹ˆ๋‹ค +์œ ํƒœ +๋Œ€์—ด +์„ ํ’ +##์†Œ์Šค +ํ”Œ๋กœ๋ฆฌ๋‹ค +์˜์„ฑ +๊ฒฝ์‚ฐ +๊ผฝํ˜” +๋‹ค๊ธ‰ +๋‚จ๊ทน +์š”๊ฐ€ +์‚ฌ๊ณ ๋ฐฉ์‹ +์—ฌํƒ€ +์‹œํ˜ธ +ํŒ์†Œ๋ฆฌ +##์ œ๊ตญ +๋ฐฐ์–‘ +์ƒˆ๊ฒจ์ง„ +ํ•ญ์„ฑ +๊ทธ์ง€ +๋“ค์œผ๋ฉฐ +๋ธŒ๋ž˜ +ํƒ์š• +๊ณจ์งœ๊ธฐ +์ค‘๋Ÿ‰ +TO +์—์„ธ์ด +์ง€๋ฐฉ์„ธ +##์•…๋‹จ +์ œ๋ฌผ +์‹ธ์šธ +##์  ํ…Œ์ด์…˜ +ํ”„๋ž‘์Šค์–ด +142 +ํ˜ธํ™ฉ +์›๋ณธ +์นจ์ˆ˜ +##TC +๋ฌด๋‹น +์„œ๋ถ +๋Œ์•„๊ฐ„ +๋Œ€์˜ +์ฑŒ๋ฆฐ์ง€ +๊ณต๋ณด +๊ฐ€๋ฅด์ณค +1400 +์ง„ํ™ +์ฆ๊ฒผ +๋Šฆ์ถ” +์„ฑ๋„ +365 +๊ณตํ™ฉ +๊ทน์šฐ +DV +์ฝ”์—‘์Šค +##az +๊ฐ€์ด +์ง€๋ถ€์žฅ +์ฆ์„œ +##ํ˜„๋™ +##๋‚˜๊ฐ” +์„ฌ๊ธฐ +##์•ผ๊ตฌ +ํ…Œ๋ผ +ํƒœ์•„ +๋ฐฉํŒจ +ํ•œ์‹œ +PP +๊ฐ€๋ช… +Me +์ œ๊ฒŒ +๋ฏธ์ˆ˜ +๋์—†์ด +์ถœํ’ˆ +TH +์ปค๋ฆฌ +์œ ๋ชจ +์‚ด์•„์™” +ํŒŒํ–‰ +์ „์ฒ˜ +Par +๋„๊นจ๋น„ +์–‘๋ง +์žฅ๋…„ +ํŒ”๋ ธ +๋‚™ํ›„ +##ater +ํ•ด๋‚จ +์ค‘์ฆ +์†Œ๊ฐœ์„œ +PGA +๋‚˜๋“ค +๊ตญ๋ฌด๋ถ€ +ํ† ๋ง‰ +ํƒ„๋„ +๋ฒŒ๋–ก +๊ณ ์œ„๊ธ‰ +๋ถ„๊ถŒ +๋“ค์–ด์„ ๋‹ค +์™”์œผ๋ฉฐ +##์•„์‹œ์•„ +์ฆ๊ธฐ +๋งž์€ +๋ฏธ์ŠคํŠธ +์กฐ์ธ +1938 +์ด์ฒญ +๋ฌด๋ฐฉ +๋ฐ˜ํฌ +##๋ถ€์ง„ +์ •์น˜๊ฐ€ +์‚ฌ๊ณ„ +์ถฉ๋ฌด +๋ชจ๋น„์Šค +Res +์ค„๊ธฐ์„ธํฌ +๋ช…์‹ค +์–ด๊น€์—†์ด +๊น€ํ™ +์ผ๊ฐ„์ง€ +์น˜๋ฅผ +๋ฐœ๋ถ€ +๊ธฐ์ˆ ์› +๋ฐฐ๋‚ญ +##๋ธ”๋ฆฌ +##๋ฐ”๊ฒ +์š”ํ•˜ +ํ”ผ์ธ  +##HO +๊ทธ๋ ค์ ธ +##์ฒด์–ด +๋‚˜๋“ค๋ชฉ +์Šคํ‹ฑ +์ปค์ ธ +์ธ๋ฏผ๊ตฐ +์˜๋ฆฌ +์ด๊ธด +๋‚ด์‹ฌ +๊ต์ฐจ๋กœ +์ง€๋‚  +์ฐจ๊ฐ‘ +##์ŠคํŠธ๋ฆฌํŠธ +๊ตถ์ฃผ +๋ˆ„๋ ธ +์ž„์‚ฐ๋ถ€ +์‹œ๋ฎฌ๋ ˆ์ด์…˜ +์”์“ธ +๋ฐ•์ฐฌํ˜ธ +๊ตฌํ•œ +์กธ๋ผ +์„ฑํ™” +์žฌ์งˆ +์•Œ๋ผ +๋ง์„ค์ด +์•”ํ‘ +๊ทธ๋žœ +์ž๊ตฌ +##์นด์ด +์ž๋ณธ๊ธˆ +๊ฒฝ์ƒ๋„ +์ˆจ์กŒ +ํ—ˆํ—ˆ +ํ˜„๋Œ€์ œ์ฒ  +๋น„๋ฐฉ +ํ™”์ˆœ +์‚ฌ๋ นํƒ‘ +ํ•ด์ € +์ง€๋ฐฉ๋ถ„ +ํ™ฉ์‚ฌ +Be +ํœ ์ฒด์–ด +์ง„๋‹ค๋Š” +๊ฒŒ๋ฅด +์ง„๊ฒฉ +๊ทธ๋ ค์ง„ +๊ฑฐ์Šค +์ด์งˆ +์—ฐ๋ฉด์  +์ง์˜ +๋ฆฌ๋ฐ”์šด๋“œ +์ดˆ์ฐฝ๊ธฐ +์ถ˜์ถ” +์Šค์ฝ”์–ด +๋น„์žฅ +๋™ํ˜ธํšŒ +##ool +BS +์ €๋ฒˆ +์ค‘์ƒ +๋„˜์–ด๊ฐ” +##ํ„ฐ์Šค +๋งŒ์ ธ +์ƒ๊ฐ• +๋งž์€ํŽธ +๋ฐฉ์ •์‹ +##์…”๋ฆฌ +์–ผ๋ฆฌ +์ฝ”์นด +๋ฆฌ์–ด +๋Ÿญ์…”๋ฆฌ +๊ณตํ—ˆ +์Šˆํผ๋งˆ์ผ“ +์ €๊ฐ +๋ˆ์งˆ +์ง„์ˆ˜ +##์ž์žฌ +##์ฟ ๋ผ +##๋ณด์ด +์„ผํ‹ฐ +๊ฐ„๋‹ค๊ณ  +๋””ํ…Œ์ผ +์†Œ์‚ฌ +๊ณต๊ต์œก +๋ผ์šฐ +๋ฐœ๋ Œ +์ž๋ฝ +๋‚ด๋ ค์˜จ +ad +ํŒ๋กœ +์˜์ข… +์„œํ•ด์•ˆ +๋จธ๊ธˆ +์ „๋ผ๋ถ๋„ +๋‹ค์šฐ +๋ฆฌ๋ฒ  +๋ณธํšŒ +์‚ฐ๋ถˆ +์‹๋ณ„ +์•„์„ธ์•ˆ +์ •๊ฐ€ +์‚ดํŽด๋ณธ +๋…ผํ•˜ +๋ด‰๊ธฐ +์ˆ˜์ถ• +๊ฐ€์†” +##fic +๋งค๋„ +ํ†ตํ‹€ +๋ฉ”ํƒ€ +##ll +์ž์—ฐ์Šค๋ ˆ +์ถค์ถ” +ํ„ธ์–ด๋†จ +Americ +##์‚ฌ๋ฅด +์ตœ์žฌ +ํ‰ํŒ +์ฃผ๋œ +์†Œํ–‰ +์›์  +๋น„๋ฐ• +๊ธฐ๋ฐ€ +์กŒ์œผ๋ฉฐ +##iew +๊ณผ๊ฐํžˆ +์‚ฌ์ฃผ +SO +๋กœ์šฐ +ํ•œ๋ชซ +ํ•˜์ฐจ +์ด์žฌ๋ช… +๋งˆ์ฃผ์น˜ +ํŠธ๋žœ์Šค +Gl +##๋ถ€์ƒ +1942 +์‹ ๋ขฐ๋„ +๋ณด๋ฅด +๊ฑฐ๋ฃฉ +์ž…ํžˆ +##๋Œ€์žฅ +ํ™”์„ฑ์‹œ +DC +์ฝคํ”Œ๋ ‰์Šค +์ „๋ง๋Œ€ +์œ„ํ•จ +๊น€์„ธ +##์˜ํšŒ +##ress +์„ฑ๋ช…์„œ +๊ธˆ์‚ฐ +๋ฉ์ฒญ +๊ฒฝ์ง +์ž…๋‹น +์˜๋“ฑํฌ +ํƒœ๊ทธ +์˜์œ„ +์—ฐ์ „ +๋ถ€์‚ฐ๋Œ€ +##์‹๋ฌผ +์ „์„ธ๊ธˆ +์‚ฌ๋ผ์งˆ +์•…์ˆœํ™˜ +์—ฌ์šด +์‚ฌ๋ฌด๊ตญ +##ime +๋พฐ์กฑ +##๋‹ค๋ฅด +๊ตญ๋ฌธ +๊ณจ๋ชฉ๊ธธ +๋‹ˆํŠธ +๋ณต์ˆญ์•„ +ํ™œ๋™๊ฐ€ +๋ฆฌ๊ฐ€ +ํญ์Šค๋ฐ”๊ฒ +ํ‰ํ˜• +์‹œํ•ฉ +๊ณต์‚ฌ๋น„ +##๋ ˆ์Šคํ…Œ +์ฝœ๋กœ +ํผ์ŠคํŠธ +##๋ฏธ์•ˆ +##ํ˜๋ช… +์œต์ž +๋†๊ฒฝ +์ค„๊ฑฐ๋ฆฌ +๊ท ์—ด +์ฑ™๊ฒผ +follow +##๋ฃจ์—” +๋ฉ€์ฉก +ELS +AFC +์ด๋ค„์ ธ์•ผ +129 +๊น€ํ•™ +##๊ธˆ์œต +1933 +๋ถ„์‹ +๊ณ ํฅ +์šฐ๋‘ +๋”ฑํžˆ +์ผ๋ฅ˜ +##AA +์ƒํผ +##์Šฌ๋ง +์ฒญ๋‹ด +์ค‘๋‚จ +##์—ํŠธ +๊ด€๋ก€ +๋ฐ˜๊ฐ’ +210 +๋‚œ๊ฐ +un +์ŠคํŠธ๋ผ +๊ฐ€๋‘ +##์ž„์ž +##ans +๊ถ๊ถ +์ˆ˜ํผ +๊ฑฐ์žฅ +์„ฑํ’ˆ +๋“œ๋กœ +๊ตญ๊ณต +์ˆ˜์–‘ +์ถฉ๋งŒ +ํ™˜๋งค +##ild +##์นด์ด๋„ +ํ˜ผ๋ˆ +์ž์—ฐํžˆ +์ถœ์› +์ข…์‹  +์นด์ž +๋“ฑ๊ทน +ํ† ๋ฒŒ +280 +๊ฒฝ๋ณต +๋“ค์–ด์•ผ +์‹ ์ดŒ +KS 
+Fran +##cl +์š•์‹ค +์„œ๋Œ€๋ฌธ +์—‰๋ง +ํž๋Ÿฌ๋ฆฌ +๋„๋ง๊ฐ€ +##๋ถ€๋ชจ +์–ด์šฐ๋Ÿฌ์ง„ +๋น„๋ฃŒ +๋‘์„ธ +์œ ๋กœ์กด +๋ชฐ๋ผ์š” +์™ธ๋กญ +์นดํ…Œ +๊ธฐํš์‚ฌ +์˜๋‚จ๋Œ€ +##๊ทธ๋Ÿฌ +๊ฐˆ์•„์ž… +๋„๋•์ด +๊ตฌ์„๊ตฌ์„ +์—˜๋ฆฌ์ž๋ฒ ์Šค +๊ธฐ์ด +๋„์‚ฐ +์Œ๋ฐฉ +์ผ์ปฌ +๋น…๋ฑ… +์š”๋ฏธ +##๊ฐฑ์ด +๋ถ€๋ฅ˜ +๋Š˜์–ด๋‚œ๋‹ค +์„ธ๋ฅด๋น„์•„ +๋‚˜ํƒ€๋‚ธ +๋ฐ”๋น„ +์™€์•ผ +##๋ฑ์Šค +์ง€๊ธฐ +๋ถ€ํ†ต๋ น +##๋งž์ด +๋ชจ์—ฌ์„œ +์˜จ๋‚œ +KE +##ower +๋Œ€ํ‘œ์ž +750 +์žฌํ™” +๋ง‰๋ง +๋น›๊น” +์•Œ์•„๋‚ด +์˜๋• +์ตœ์ • +์‚ผ๊ฐ€ +๋กœ์Šค์ฟจ +์• ๋„ +๋‹ค๋Š”๋ฐ +##ric +##๋ฒ ์ด์…˜ +์ฑ™๊ธด +๊ตฌ๊ธ‰ +๋ณ€์งˆ +์ƒ์‹ +๋„์‚ฌ +๋ฏธ์ˆ™ +์žกํ˜” +##row +์ œ์ง€ +ํ•œ์—†์ด +์‹ ๋ช… +ํ˜น๋… +๋ชจ๊ณต +์—ฌ๊ฒจ์กŒ +ํ”Œ๋ฃจ +๋ƒ‰๋ฉด +ํ•˜๋‹จ +์ง€์ฒญ +์šฐํฌ๋ผ +์˜ฌ๋ผ๊ฐ„๋‹ค +ํ™ˆ๊ฒฝ๊ธฐ +169 +PM +์ผ€๋„ค +์•„์ด๊ณ  +๊ผฌ๋ฐ• +์ด๊ตญ +์„ฑ๊ธฐ +์˜๋„ +์‹ฑ์‹ฑ +ํฌ์ฆˆ +๋ฌด๋ถ„๋ณ„ +์˜ˆ๊ฐ +์†Œ๊ด€ +ํ‘ธํ‹ด +์˜๋ก€ +๊ด€์šฉ +MD +์ƒ๊ตญ +๋„ฅ์Šจ +๋ฌธ์–‘ +๊ฒฝ๊ธฐ๋ ฅ +์˜์ฒœ +##๋ ˆ์Šคํ…Œ๋กค +ํด๋ฆฌ๋‹‰ +๋ฌด์„œ์›Œ +##์›๊ตฐ +๋ฐฉ์–ธ +์–‘์ง€ +๊ฐ•์ฒ  +์˜คํƒ€ +๋Š”๊ตฐ์š” +๋‹ค์ž +์‹ค์ˆ˜์š” +๋Œ€๋ฉด +์ •์—ฐ +๋ฐ˜์ง์ด +๊ต์ œ +์ฝœ๋ ˆ์Šคํ…Œ๋กค +์„ธ๊ณ„์ธ +์„œ๊ฑฐ +๊ฑฐ์ธ +๊น€๊ด€ +๊ฐ•ํŒ +๋ฐฉํƒ„ +๋ž„๊นŒ +##์ปคํŠธ +134 +์กฐ์†ํžˆ +์‚ผ์ฒ™ +์™ธ๊ณผ +๋ถˆ๊ณ ๊ธฐ +330 +๊น€ํšจ +๋Œ€์ง€์ง„ +์Šน์†Œ +ํ†ต์งธ +##๋‚˜ํ•ญ๊ณต +๋นจ๊ฐ„์ƒ‰ +๋ฒ ํ†  +๊ธˆ๊ธฐ +๋…๋ฆฝ์„ฑ +##๋ฏธ๋ฅด +2022 +์ฆ๊ฐ• +์ง‘์•ฝ +ํ•˜์†Œ์—ฐ +๋Œ€๋‚ด +์‚ฌ๋ผ์ง„๋‹ค +๋‹น๋‹นํžˆ +๋‚™์„  +์˜ˆ์ˆ ์ธ +3500 +ํ•ด์  +VIP +๋ฌด์ด +์•„์‹œ์•„๋‚˜ํ•ญ๊ณต +๋ณธ์ง€ +๊ณ ๋ฅธ +##๋ฆฌ๋‚˜ +์€๊ทผํžˆ +##๋ฌด์ง„ +ํ•ด์ธ +์‡ผํฌ +##์ƒ์ˆ˜ +์งฌ๋ฝ• +์ปค์ง„๋‹ค +์œ ๋ผ +์ด๋ฐ”์ง€ +JTBC +๋ˆ„๋น„ +ใ†๋ฏธ +๋‹ต์‚ฌ +์•„๋‚  +##์Šต๊ธฐ +๋ฌด๋„ˆ์กŒ +ํ˜•๋ฒŒ +๋ถˆํ—ˆ +์น˜๋ถ€ +์˜ค์…˜ +๊นŒ๋‹ค๋กœ์šด +๋นผ๋Œ +๋Œ€๊ตฐ +๊ฐœ์—… +์น˜๋Ÿฌ์ง„ +์Œ๋ž€ +์ณ๋‹ค +IOC +์ˆ˜์šฉ์†Œ +๋‹ด๋‹ด +##ox +UAE +์ฝœ๋กฌ๋น„์•„ +์‹œ๋งˆ +๋ฏธ๋‚˜ +๋ฅด๋…ธ์‚ผ์„ฑ +ํŽ˜๋„ +1932 +ํ•จ๊ฒฝ +๊ต์œก๋น„ +๊ธฐ์œ +์ „๋ณด +##๋ถˆ๋ฆฌ +์†์žก์ด +์šดํ•˜ +๋ถ€๋Ÿฝ +์ดํ•œ +์ง„๊ณต +์ฃผ์ž„ +์ฒœ๋„ +๋ณ‘์‹ค +ํŠธ์œˆ +๋‹น๋„ +์†์ถœ +์ฃฝ์ผ +์•ฝ์‹ +์‚ฌ๋‚˜ +์นด์šดํ‹ฐ +์ „์ง‘ +ํŽธ์ง‘์ž +๊น€๋ฏธ +##yn +๊ฒฝ์ œํ•™์ž +๋งŒ์žฅ +์ˆ˜๊ฑด +ํœ˜๋‘˜ +##์ผ€์ด +์ Š์Œ +์‹ ๋ฏผ +์–ด์šฉ +์กฐ๊ด‘ +๋Š”๋ฐ๋„ +DMZ +์Šค๋Ÿฌ์šธ +๋ฌด๋”๊ธฐ +##์—ฐ๊ตฌ์› +๋ณผ๋ฅจ +##ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ใ…‹ +๋งŒ์—ฐ +๋‹น์œ„ +๊ฐ€๋กœ์ˆ˜ +์ „๊ฒฝ๋ จ +์ „ํ™”๊ธฐ +์„ ๊ธ€๋ผ์Šค +๊ทธ์ฃ  +์‚ฌ์ถ˜ +๋‹ˆ์ฒด +์‚ด๋ฆฐ +์ฐฌ์Šค +##ME +๊ถ์ค‘ +๋””์˜ค +๋ฐœํ˜„ +๋– ์˜ฌ๋ ธ +๋‚จ๋Œ€ +๋น„์šฐ +์งœ๋ฆฟ +๋ฐ›๋“ค +์ž‘์œ„ +ํผ๋ถ€ +๊ฐˆ๋ง +๋ผ์ด์˜จ +์ˆ ์ž” +##์ œ๋ฆฌ +๋‚˜์•„๊ฐˆ +##ON +๋ฐ”๋ผ๋ณผ +MBA +Ind +cl +ํŒ”๊ฟˆ์น˜ +์ด๋‹จ +์ฃผ๋ฒ” +์„ฑ๋ฌธ +๊ผด์ฐŒ +๋ณต์ง€๋ถ€ +์„œ๋ถ€ํ„ฐ +๋ง๊ฐ +์–ด๊ธฐ +์˜ฌ๋ผ๊ฐ„ +##ade +๋‚˜์Šค +##tiv +270 +๊ต๋Ÿ‰ +๊ตฌ์•ฝ +๊ฐ•๋ น +์„ฑ๋Œ€ +์žฌ์ผ +์‹ค์ข…์ž +ํ”ผ์•„๋‹ˆ์ŠคํŠธ +๊ฒจ๋ฃจ +์˜ต๋‹ˆ๋‹ค +์ดํšจ +๋„์šธ +1929 +##์•ˆ์ „ +์•„๋ฆฌ์Šคํ†  +์ƒ์†Œ +์—ฌ์ฃผ์ธ +ํ˜ธํ”„ +์ถ”์ถœ๋ฌผ +๋งˆํ” +์œ ๋…„ +์–ด์›Œ๋“œ +์ „ํ•ด์ง„๋‹ค +์›Œ์ปค +You +๋ฒ ํ† ๋ฒค +์›”์ŠคํŠธ๋ฆฌํŠธ +๋ฐ˜์ž… +์˜ค์นด +๋ฒ„๋ฆผ +๋‚˜๋ฏธ +๋ถ€๋ฅด์ฃผ +๋ฃจ์‹œ +์กฐ์—ฐ +๊ด€์•… +์ €๋ช… +๋ง๋ ค +์„ค๋ น +##ai +##๋ฒ ๋ฅดํŠธ +๋ญ‰์น˜ +์Šคํ‹ฐ์ปค +๋ถ€๊ฐ€๊ฐ€์น˜ +##AN +๋กœ๋“œ๋งต +und +์ง€๊ตฌ์ดŒ +๋™๋‘ +##ํฌ์ธ  +##ํ‘ธ์Šค +ํ‘œ์ ˆ +๋ฌด๋”์œ„ +์†๋‚ด +์ผ๋ถ€๋ถ„ +๋ฏธ๋…€ +์œ ๋‹ˆ๋ฒ„ +Har +์ž”์ž” +์กฐ์•„ +๋Œ€๋ช…์‚ฌ +ํˆฌ์Šค +๊ทน๋™ +##๋œจ๋ฆด +์†Œ์‹œ +์ด์†Œ +ํ›„๋ฌธ +pl +์ผ๊ฐ +ํ˜ธ๋‘ +์ทจํ•ญ +๊ฒฝํ•ฉ +๊ตฌ์ง์ž +๋ฝ๋‚ด +์Šฌ์ฉ +์ผ๋ ‰ +์Šคํ…Œ์ดํŠธ +##BM +ํŒ€์› +์ž ๊ฒจ +์ƒ์—ฌ +1943 +๋ฌผ๋Ÿฌ์„œ +์„ฑํฌ๋กฑ +์ดˆ์ƒํ™” +ํ’ํ†  +๋ด‰ํ™” +์ฒด๋ฉด +์™ธํ™˜์€ํ–‰ +์—ฐํ–‰ +๋‚™ํ•˜์‚ฐ +๊ฑด๋„ˆํŽธ +๊ธฐ๋…์ผ +์ฆ์‹ +173 +๋งˆ๋‚˜ +๋…์ผ์–ด +์•„์˜ค +๊ด€์ƒ +๋‚ด์„ฑ +๋ฐ”๋ž +๊ธฐ๋‹ค๋ฆด +๊ตญ๋ณด +์—”ํŠธ๋ฆฌ +##๋„๋ฆฌ +๊ฐ€๊ฒฐ +์ผ€๋„ค๋”” +๊ฐ์„œ +ํฌ์ง„ +๊ฑด๋งŒ +๊น€๋ถ€ +์ „์† +Korea +ํ–„๋ฒ„๊ฑฐ +์„œ๋ฆฌ 
+์Œ์ฃผ์šด์ „ +๊ด€๋ฆฌ๋ฒ• +์ˆœ์ข… +๊ฐ€์†”๋ฆฐ +์ง„ํ–‰์ž +ํŠธ๋กœํ”ผ +##์ œ์ด +๋ถˆ๊ฑฐ์ง„ +์œ ์‚ฐ๊ท  +์ƒ๊ฒจ๋‚œ +ํƒ€์ผ +์›”๋“ฑ +ํฌํ•ญ์‹œ +์‹ฌ๋ฌธ +๋Œ€ํƒ€ +ํˆฌ์‹  +๋งˆํฌ๊ตฌ +๋ฐ•์„ธ +์ฒจ๋ถ€ +์นธํŠธ +ํšŒ๊ท€ +๋ง์ปจ +๊ผผ์ˆ˜ +๋ฐค์ค‘ +์‹ฌ์‹  +์ด๋…ธ๋ฒ ์ด์…˜ +International +์ ˆ๋‹จ +๋กœ๋ผ +์ฒญ์ž +์ปคํŠผ +์–ด๋•  +์•„์šฐ๋”” +์น˜๊ฐ€ +ํˆฌ์„ฑ์ด +๊ฑฐ์ฒ˜ +##๋ง‰์ด +๋‹ˆ์ฆˆ +๋ ˆ์ผ +์žฅํƒ€ +๋ฌด์ž‘์ • +๋ถ€๋”ช์น˜ +๋Šฅ๊ฐ€ +์†ก์˜ +ํ™•์ง„ +##tit +์ž์—ฐํ™˜๊ฒฝ +๊ทธ์นœ +๋งก๊ฒผ +FM +์ด์‚ฐ๊ฐ€์กฑ +๋กœ๋ Œ +ํ›„๋ฐฉ +์ž์˜ +๋žœ๋“œ๋งˆํฌ +๋‚˜๋ฌผ +๋ถ€๋…€ +๋“ค๋ ค์˜ค +์•„ํ† ํ”ผ +๋ ๊นŒ์š” +##ere +1935 +๋Œ€์ „์‹œ +๋‹ฌ๋Ÿฌํ™” +์„œํ•œ +๋‚ด๋น„์ณค +์ด๋นจ +์˜คํฌ +๋ฏธ์ง€์ˆ˜ +๊ตฌ๋… +ํ…Œํฌ๋†€ +##rans +ํ‡ด์ง๊ธˆ +๊ต๋ฌ˜ +์–‘ํ˜„ +๋ฉ˜ํ† ๋ง +##๋„ˆ๋ฌด +์˜๋ฃŒ๋น„ +์ธํ•ด์„œ +์Šฌ๋ฆผ +์œ ๊ณ  +๋‚œ์ด๋„ +๋ณด๋„ˆ์Šค +##๊ฒฐ์Šน +๊ตํฌ +์ถœ๊ณ  +##์ฒœ๋™ +ํ‰๊ธฐ +์ œ์ธ +์ฐจ๋“ฑ +๋”๊ตฐ๋‹ค๋‚˜ +ํ•˜๋ถ€ +##ina +์—ฐํฌ +ํ•˜์—ฌํŠผ +์ฒ ์ƒˆ +์‹ ์ƒ์•„ +031 +ํ–‰๋ฐฉ +ํผ์กŒ +##์•„๋ฅด +์ •๋Œ€ +์™€์ดํŒŒ์ด +๊ตฌ๋ฅด +๋ฐฐ์ž„ +๋™๊ทธ +๋ถˆ์‘ฅ +##์ด๋‚˜ +320 +๊ทธ์น  +์ข…๋ž˜ +๋ณตํŒ +ํ•œํŒŒ +ํ—ค๋ผ +##ํ…์Šค +๋’ค์„ž +๊ฐ€์นญ +##๊ณต์› +๊ฐ€์œ„ +##์Šฌ๋ผ +ํ”Œ๋ผ์ด +๋ผํ‹ด์–ด +๋ฐฉ์ˆ˜ +ํ† ์š” +๋ฏธ๋ผ +ํƒ๊ตฌ +๊ธฐ์ €๊ท€ +๊นจ์–ด๋‚˜ +๋Šฅ์„  +์‹ฑ์–ด +๋…ผ์ฆ +์•Œ๋ ‰์‚ฐ๋” +์ฐฉํ•˜ +๋‘ํ„ฐ +์—ฐ๊ตฌ์‹ค +##์‚ฐ๋™ +๊ณ ๋ฃจ +์ •ํ•ด์ ธ +๋ฉ”์šฐ +์•„๊น +๊น€์„ +๊ธ‰๊ธ‰ +์บ์‹œ +์ง„๋‚˜๋ผ +ํŒ”๋กœ์šฐ +์žฌ๊ฒ€ +๋งํ•˜ +ํ™”๋ฒ• +๋Œ€์กธ +๊ฒŒ์œผ +์‚ด๋ฆด +ํ•ด๋‚ด +๋ฐ€๋ ˆ +๊ตญํ™” +์†Œ๊ฐ +##ํœด์ผ +์•Œ์•„๋ณผ +##๋ฃจ์—”์ž +ํ™˜๋ถˆ +์ง€ํ +์‚ฌ์ถ˜๊ธฐ +๋ชฉ๊ฑธ์ด +๋™๋… +GT +##๋ฐฑํ™”์  +์ ๊ฑฐ +์กฐ์„ ์—… +๊ธฐ๋ฅ˜ +๊ณ„์ง‘ +์ค‘์•™์ผ๋ณด +์ดˆ์› +๊ธ‰์„ฑ์žฅ +131 +๋ชธํ†ต +์˜ฌ๋ผ์„ฐ +๋‹ฌํ•  +์ž„๋Œ€์ฐจ +ใ†์ • +์ง‘์‚ฌ +๋ถ€๋‹ด๊ฐ +์ข…์–‘ +##๋กœ์ด +All +์‹ ์ • +๋ชจํ„ฐ์‡ผ +๋‹ฌ๋ผ๋ถ™ +๊ฒฉํŒŒ +๋ฒ ๊ฐœ +์–ด์šฐ๋Ÿฌ์ ธ +์• ์ฐฉ +๋ฌด์ƒ‰ +์ˆ™๋ จ +##๋ฆฌ์‹œ +##์‚ฌ๋ž‘ +##์•„ํ‹ฐ์•„ +##์‹ญ์‹œ์˜ค +๊ฐ•๊ธฐ +๋‹ด์•„ +๊ธˆ์„ฑ +๊ตฌํ–ˆ +ํฌํ™” +ํšŒ์ฐจ +ํ˜ผ์žก +์ •์ค€ +์–ด๋ฏผ +##๋งˆํ‚น +ํ™ฉ๊ต +๊ฐ€์ „์ œํ’ˆ +ํ•ญ์•„๋ฆฌ +๊ฒฌํ•™ +๋ฒ ํƒ€ +##๋ž˜ํ”„ํŠธ +๊ฒฝ์ œ์„ฑ์žฅ +๋‹ฌ๋ ค๊ฐ” +์„ ์ฒœ +๋‹ฅ์น˜ +ํ™”์งˆ +์ด๊ณต +260 +๋ฆด๋ ˆ์ด +์ทจ๋“์„ธ +##์„œ์Šค +๊ฒฝ๋Ÿ‰ +๊ฐ„์ ˆํžˆ +๋งˆํฌ +๋ฒค์น˜๋งˆํ‚น +์ธํ”Œ๋ฃจ์—”์ž +์ตœ๊ฒฝํ™˜ +์ปจ์„คํ„ดํŠธ +ํฌ๋กœ์•„ํ‹ฐ์•„ +ํŠน์‚ฐ๋ฌผ +๋“œ๋ ˆ +๋”๋Ÿฌ์šด +์†Œ๋น„์—ํŠธ +ํƒ€์ด์™„ +๋†“์ด +์žก์Šค +์ดˆ์•ˆ +๊ณฑ์ฐฝ +๋‚˜์ธ +์ค€์šฐ +์ฃผ์„  +##iel +์œ„์Šคํ‚ค +๊ฐ•์„œ๊ตฌ +์ด์˜ค +๋ฐ˜๊ฐ€์šด +๋‹น๊ถŒ +์ง€๋งŒ์€ +๋ถˆ๋ณต +๋ฐ˜๊ณต +์ดํฌ +๊ณต๊ด€ +์œ ํฅ +๊ฒฝ์ „์ฒ  +์ขํ˜€ +์ธ์–‘ +##ution +๋ฐœ๋ฆฌ +์˜์–‘์†Œ +์ง„ํ•˜ +๋‚จ๋„ +์ž…๊ฐ€ +์กฐํ•ฉ์žฅ +์šฉ์‚ฌ +์–ฝํžŒ +์ ˆ๋„ +๊ฑฐ๋ž˜์ผ +๋ฏผ๊ฐ„์ธ +์„œ๋… +์žํ–‰ +๋ฌด์‹ค์  +๋ถˆ์”จ +์—ฌ๊ฐ์„  +๋ฐฐ์›Œ์•ผ +์–ธ๋œป +๋‘๊ทผ +์ฐ”๋Ÿฌ +##์‹ค๋ก +์ธ์งˆ +๋ถ€์ถ”๊ธฐ +๊ด€๋ฆฌ๋น„ +์—ด๋“ฑ +์ˆ˜๋ฝ +๊ฐ€์—ด +์—ฌํ˜ธ +์šฐ๋Ÿฌ +์ค„๊ฒŒ +์•ž๋ฐ”๋‹ค +์ฒœ๋ช… +๋ ˆ์‹œํ”ผ +๋ชปํ•œ๋‹ค๋ฉด +์ดˆ๋น™ +๊ณ ๋‡Œ +์นผ๋ผ +๊น€์ œ +##์นด์Šค +RO +์ด๊ณ„ +์กฐ๋ฅด +๋ฆฌ๋ณธ +๋ฏธ์—ญ +##์ „๊ธฐ +##out +์‹ค์ฑ… +์–ด๋‘์›Œ +์†Œ์ธ +##์‚ฌํƒœ +์ œ์ž‘๋น„ +๋’คํ” +๋ฌด์–ด +๋งˆ์ทจ +๊ตํ–ฅ๊ณก +TE +๋ถ„ํ™ +์ฒ ๋„์—ญ +์—”๋”ฉ +๋”ฐ๋ฅผ +์ƒ์‚ฌ +๊ตด๋ณต +์ง์ข… +ํˆฌ์—ฌ +๊ฒฝ์ฒญ +๋ฐฐํƒ€ +์•”๊ธฐ +์•„์‚ฌํžˆ +์‚ฌ์Šฌ +๋ฐ”์˜ค +์„ ์ฒด +์„ ์ธ +๊ฐ•๋ฐ• +์‚ผ์„ฑ์ฆ๊ถŒ +๋ฐ•๊ธฐ +ํž˜์ฐจ +์—์„ผ์Šค +์ œํ›„ +์ŠคํŠธ๋ฆฌํŠธ +๋น—๋ฌผ +์ข…๋ถ +1914 +๊ทธ๋ฆฝ +์ฝฉ๋‚˜๋ฌผ +##์„œ์•ผ +๊ธฐ๊ด€์ง€ +๋ง›๋‚˜ +์ด์„  +๋ถ€๋”ชํžˆ +์„ฑ์•… +๊น€๊ทœ +๊ฐ€์™€ +##pl +๊ฐ์„ฑ +์„๋ฉด +ํ„ฑ์—†์ด +๋‚ด์คฌ +์„œ์‹์ง€ +์Œ“์•˜ +๊ตฐ์‚ฌ๋ ฅ +๋†€๋ž„ +ํ•œ์‹ฌ +๋Œ€๋ž€ +์„œ์‹  +์—ฐ์‹  +์†Œ์ˆ˜์ž +๋ฃจ๋งˆ +self +๋’ท๋ฉด +๋‹ค๊ฐ€์˜จ +it +##ํ‡ด์ง +ํ„ฐ์ „ +KD +๋งค๋‹ฌ๋ฆฌ +์ž”์—ฌ +๊ฐ์„ธ +์ ค๋ฆฌ +##๋ถ™์ด +๊ณต์† +203 +##๋ถ€๋ฆผ +์••๊ตฌ +๋‘๋“œ๋Ÿฌ์ง€ +๋ˆ„๋ฝ +des +์—ด์ค‘ +์„ ๊ฐ€ +์€์œ  +๋นŒ๋ฏธ +ํ‡ด์› +##ํ‹ฐ๋…ธ +์‹œ๋“œ๋‹ˆ 
+์—ญ๋„ +๋‚˜์™€์•ผ +๋˜๋Œ๋ ค +NE +bl +๊ณ„์•ฝ๊ธˆ +๋Œ๋ ค๊ฐ€ +๋ผ๊ณ ์š” +๋น…ํ† ๋ฆฌ์•„ +๋ฉด์น˜ +์ˆœ์ง„ +๋ณ€์ฆ +ํ๋ธŒ +์—ด๋Œ€ +์›๋‘ +๋ณต์‹ +์‹ ์ฒญ์ž +์žฅ๋ชจ +๋ฆฌํฌํŠธ +๋ง‰๋ถ€ +๋ฐ”๊ทธ +์ž์„œ +์ „๋ฐ˜๊ธฐ +์—ฐํšŒ +์ฐธ๋ฐฐ +์ฒœ์ • +ํŠธ๋Ÿฌ๋ธ” +ํ”Œ๋žซํผ +๊ฐ„๊ฒฐ +๋™๋ฐฑ +์•„๋ชจ๋ ˆ +ํŒŒ๋ณ‘ +์‹œ์ฆˆ +##์„ ๊ฑฐ +re +์ง€ํ•˜์ˆ˜ +๊ฐ„๋žต +๋ชฐ๋ฆฐ +##ile +1100 +์ฆ๋ช…์„œ +์ €๊ฒฉ +๋ฐ”์‚ญ +๋ฐ•์ค€ +์ตœํƒœ +##๊ฑฐ์ง„ +ํ•˜๊ธด +์ซ„๊นƒ +์–‘์‚ฌ +์…”๋„ +##์ˆ˜์‚ฐ๋ถ€ +์ถœ์ž…๊ตฌ +์ค‘์ƒ +ํšจ์šฉ +##๋ฅด๋“œ +##์šฐ์น˜ +๋ฐ์Šคํฌ +ํŒฌ์ธ  +๋ฆฌํ”„ +์˜๊ธฐ +๊ตฌ๋ฏธ์‹œ +๊ฑฐ๋‹ˆ์™€ +์ด‰๊ฐ +##๊ฐ€์ • +CNN +์›์น˜ +ํ–ฅ์œ  +์šธ๋ฆ‰๋„ +๋ณธ๊ตญ +์ •์•ก +๊ตฌ์•  +์˜ค์Šค๋งŒ +๋“์ด +๋ผ์ด์–ธ +##ํ…”๋ ˆ์Šค +dog +##์ค‘๊ตญ +์•„์ด๋Ÿฌ๋‹ˆ +์ŠคํŠธ๋ผ์ด +์ปค์ง„ +์œ ๋ฆฌ์ฐฝ +๋ง‰ํžŒ +์ฒ˜์šฐ +์“ฐ๋Ÿฌ +๋ฉœ๋กœ๋”” +์ด๋ฌธ +ํŒจ์ „ +ํ’ˆ์œ„ +ํ•ด์™ธ์—ฌํ–‰ +๋ฒ„์ง€ +์ปจํ…์ธ  +๋‘ฅ๊ธ€ +๋ถ€์‹œ์žฅ +์•„์ดํŒจ๋“œ +์ˆ˜์‹ฌ +์ธ๋ฐฉ +์ฐฉ์˜ค +์ƒˆ๋กœ์ด +๋กœํ”„ +์ˆœ๋ฐฉ +๊ทธ์ณ +##NC +1925 +์žฅ๋งŒ +์œ ๋ฐฉ์•” +##์ •์‚ฌ +๋‚ด์‹ค +๊ณตํœด์ผ +ํ˜ผ๋™ +๋ถ์  +์ ์‹ญ์ž +๋ด…์‹œ๋‹ค +๋ธŒ๋ผ์šฐ +๋น„์‹œ +๊ฐ€๋ฉด +๊ตฌ์šด +๋ฒฝ๋ฉด +ํํŠธ +ํ‰๊ฐ€์ „ +๋‹ฌ์ธ +BB +##์—˜๋ผ +์„ฑ๋ฆฌํ•™ +ํƒˆํ™˜ +๊ปด์•ˆ +๋ฒˆ๊ฑฐ +์†Œ์น˜ +1924 +๋กฏ๋ฐ์›”๋“œ +๊ฐ€์•ก +ํ™์ต +์ถœ์„ธ +๊ฐˆ๋ฆด +์œ„๋ฐฐ +์ˆ˜๋— +at +๋‹จ์˜ +์•„์‰ฌ์›  +๋‹จ๋ จ +์•„๋ฆฌ์Šคํ† ํ…”๋ ˆ์Šค +์ฃผ์–ด์กŒ +ํ•ด๋ฌผ +๋„ˆ๋ฌด๋„ˆ๋ฌด +๊ณผํ•™๋ถ€ +๋ชธ์ง‘ +์šฐ์ฆˆ๋ฒ  +๋ฒŒ์–ด๋“ค +์ด๋“œ +๋ฉ”์ด๋“œ +๋ฐ›์นจ +์•Œ์„  +##ํ•„๋“œ +๋ฐฉ์ฝ• +๋ง‰๋ง‰ +๋ฐธ๋Ÿฐ์Šค +๋ณด๋ น +ใ†์ด +๊ฒฝ์ž‘ +์•Œ๋ฆด +์œ ์˜ +์ „ํ†ต๋ฌธํ™” +๋กœ์ด +์นจ๋ฒ” +1934 +์˜์„ฑ +๋ฐ˜๋Œ€ํŽธ +ํ’๋ฏธ +ํ–‰์ •๊ด€ +ํ•™์ฐฝ +์ „๊ด‘ +์ค‘ํ™”๋ฏผ๊ตญ +ํŽŒํ”„ +์–ด์ด์—† +์•ฝํ˜ผ +My +๋”๋Ÿฝ +๋งค๋“ญ +๋ณดํ–‰์ž +ID +my +๋ถ€์ œ +๋‹จ์ฒด์ „ +##์• ๋‚˜ +๋ƒ„๋น„ +ํ™๋ช… +##๋ผ๊ธฐ +ํ…Œ๋ผ์Šค +์‹คํ—˜์‹ค +๋‹น์ฒจ์ž +์†Œ๋ฐฉ๊ด€ +๋‹ค์› +๋™์Œ +์Šค์œ„์น˜ +ํŠธ๋ฃจ +์˜์กด๋„ +๊ณ ๋งˆ์šด +##๋“œ๋ผ๋งˆ +ํœด์—… +๋ฐฉ์‚ฌ์„ฑ +ํƒค๋ŸฐํŠธ +๊ณ ์ถฉ +์œ ์ ์ง€ +์žฌ์ƒ +๋ฏธ์šฉ์‹ค +ํƒ์ง€ +๋ชจ๋ฅธ๋‹ค๊ณ  +์†Œ์†ก๋ฒ• +ํ† ๋จธ์Šค +๊ตณ๊ฑด +MM +๊ธฐ๋‹ค๋ ค์•ผ +ํ™”๋ฐฑ +๊ฒฐํ•ต +๊ฐ„์†Œ +1928 +์•„์ธ์Šˆํƒ€์ธ +##์•„๋ณด +๊ฟฐ๋šซ +๋ˆˆ๋ถ€์‹  +์“ธ๋ฐ์—† +##๋‚ด๋ ค +๋ฌด์Šน๋ถ€ +๊น€๋ฌธ์ˆ˜ +์ž๋ฐฑ +๋†’์ธ +ํ‰์˜จ +##ublic +์ฒดํ•˜ +ํ•œ์•ฝ +์•ผ๋‹จ +like +์•„๋“œ +์• ๋„๋ฆฌ์ŠคํŠธ +์›…์žฅ +ํญ์šฐ +์žฌํŒ์žฅ +์ •์ธ +๋ถ€๋„๋Ÿฌ์šด +๋ฐ€์–ด๋‚ด +##ํ–‰์‚ฌ +์ซ“๊ธฐ +์š”๋ฆฌ์‚ฌ +์—ฐ์ฃผํšŒ +๋…๊ฐ +๋น„์œ„ +##์•„๋จน +๋žจํ”„ +๊ท€์† +์†๋‹ด +์ŠนํŒจ +ํ™ฉ๊ต์•ˆ +๋…ธ๋ž˜๋ฐฉ +์‚ฌ๋ญ‡ +์ผ์ฃผ +์•ผ์ฟ  +์˜ˆ์ •์ž +์•„๋‚ ๋กœ๊ทธ +Mon +๋ฐœ์ „๊ธฐ +์‚ฌ๋ฅด +์žŠ์–ด๋ฒ„๋ฆฌ +1931 +ํ—ˆ๋ฆฌ์ผ€์ธ +์œ„๋ก€ +์–ดํ•„ +์—๋„ +์‚ฐ๋ฌธ +์ฃ„์ฑ… +์„ ๋กœ +On +์ฐฝ๋…• +๋ถ€์ƒ์ž +์นญํ˜ธ +๋ชจ์€๋‹ค +๋ถˆ์ถœ๋งˆ +๋‚˜์˜ต๋‹ˆ๋‹ค +์กฑ๋ฐœ +๋ฐ˜๊ธฐ๋ฌธ +์‹์š• +๋ฏผ๊ด€ +##๊ผฌ๋ฆฌ +ํ•„์ง€ +์ฒœ๋ง‰ +์ผํƒˆ +1918 +๊ทธ๋Ÿฌ๋ฉด์€ +๋“œ๋‚˜๋“ค +๋ถˆ๋ฒ•ํ–‰์œ„ +๊ตฌ๋งˆ +##์ฒœ๋ฆฌ +๋ฌด๋„ˆ์ง„ +๋„ค์˜ค +##co +##evel +๋“œ๋ฃจ +๋ฐ˜์ฃฝ +๊ฑธ์น˜ +๋ฒผ๋ž‘ +๋ชจ์—ฌ๋“ค +๋‹ค๊ฐ€์˜ฌ +๋ธŒ๋กœ์ปค +์žฌํ˜ผ +ํŒŒ์ด๋‚ธ +ํ˜„์ € +๋Œ์ถœ +๋ด‰์ง€ +##์†๊ธฐ +ํ˜๋Ÿฌ๋‚˜์˜ค +##์„ธ์šฐ +##iam +๋ฐ๋ชจ +๋…์ผ๊ตฐ +์•ผ๋งŒ +๋ฌด๋ผ +et +์˜ค์ฐฌ +์ค‘์•™๋‹น +๋™๋Œ€ +ํ˜œ์„ฑ +##CO +๋‹ค์‚ฐ +##ale +์„ธ์กฐ +์ฃผ์ง€์‚ฌ +์„ฑ๋ถ +์‹œ๋ฒ ๋ฆฌ์•„ +์–‘์† +ํƒœ์ƒ +๊ณก์‹ +์ €๊ธˆ๋ฆฌ +๋ถ„์‚ฌ +์•„๋‹ˆ์š” +๋งˆ์Œ๊ฐ€์ง +์นดํ”ผ +##์ฝ”ํ”„ +์˜ฅ์…˜ +์นด์žํ +์†์ƒ +##์šฐํŠธ +์–‘์žฌ +์ •์น˜ํ•™ +์ดˆ์Œ +์•ˆ์น˜ +์‹ฌํ–ˆ +์ ‘์–ด +์–ด์ง€๋Ÿฝ +1905 +DVD +์—ฐ๊ฝƒ +๋…ธ์ž +##ํ”ผ์–ด +##ใ…‹ใ…‹ใ…‹ +๋ฐ•๋™ +๊ณผ๊ฒฉ +๋ฉธ์น˜ +๋ณถ์Œ๋ฐฅ +์™„์ฃผ +์šฐ์ธก +ํ•˜ํ”„ +์‹ค์ˆ˜์š”์ž +ํํ•ด +๊ฒฐ์‚ฌ +๋ณด์ผ๋Ÿฌ +ํ›„์ž‘ +์ž์ดˆ +##๋ชจ๋น„์Šค +ํ•ด์–‘์ˆ˜์‚ฐ๋ถ€ +์˜์ˆ˜์ฆ +๊ณผ์™ธ +๋กœ๋นˆ +์‹ฌ๊ฒฝ +๊ณ ์งˆ +์—ฐํ•˜ +๋งˆ๋ธ” +๋นต์ง‘ +๋ฉด๋„ +์ด๋ค„์ ธ +๋‚ ์นด๋กญ +ํ•ด์‚ฐ๋ฌผ +๋ฌด๋ชจ +ํ™”๋ถ„ +์ฒญ์†ก +ํ„ฐ๋ฌด๋‹ˆ +๋ฆฌํ‹€ +๋ง‰๋ก  +๋”ฐ๋ผ์žก +๊ฒฝ์ œ๋ ฅ +์žฅ์–ด 
+๊ทธ๋ž‘ํ”„๋ฆฌ +์‹œ๋‹ˆ์–ด +์•™์ƒ +##๋‘˜๊ธฐ +์–ด์ฉŒ๋‹ค +๋”ํ•ด +ํƒœ์—ฐ +์„ฑ์ข… +์‹ธ๋Š˜ +๋ฐ•ํ˜€ +๋ณต์ง +ํฌ๊ทน +์ค€๊ฒฐ์Šน +์„ธ๊ด€ +์ž”ํ˜น +๋ช…๋ฐฑํžˆ +##์ง€์—„ +๋น„์ผœ +ํ•ญ์ƒ +์น˜์‚ฌ +์“ฐ๋‹ค๋“ฌ +๋Ÿฌ์‹œ +์•Œ์•„๋“ฃ +๊ฐ•๋“ฑ +์ฒ˜์‚ฌ +ํ˜„๋Œ€๋ชจ๋น„์Šค +์š”์ปจ๋Œ€ +๋ฒ„๊ทธ +๋ชจ์นœ +์‹ฌํฌ์ง€์—„ +์›”ํ‰๊ท  +๋„ค๋„ค๋„ค +๋ฒ ์ผ +ํ›„๋Œ€ +DB +ํ• ๊ฒŒ์š” +ํ†ตํ•™ +##์‹œ๊ณ„ +##๋‹๋ผ +๋ถ€์ฉ +instaf +๋ณ„์žฅ +์ง€๋‹Œ๋‹ค +##ise +์‹œ๋ชจ +์ •๋ˆ +ํ˜„๋Œ€๊ฑด์„ค +์„ธ์Šต +์†์ƒ‰ +##์ธ๋‹ค๋Š” +๊ต์ธ +์‚ฐ๋‹ค๋Š” +##ํŒŒ์ด์–ด +๋ชจํ„ฐ์Šค +์•ˆ์‹ +๋งž๋ฒŒ์ด +ํ•ด๋ถ€ +๊ธด์ถ• +๊ตฐ์ • +๊ตฌ์ถœ +์ฐจ๊ทผ +์ฒญ์ทจ์ž +##๋ ˆ๋งˆ +์‹ค๋ก€ +์Œ์Œ +์†๋ฐœ +##ren +๊ฐ€ํŒŒ๋ฅธ +์›Œ๋„ˆ +ํŒŒ์ถœ +2021 +์œค์ƒ +๋„๋ผ +๋ฐฑ๋‘์‚ฐ +๋ณต๋ถ€ +##๋ฒ„๋ ธ +์น˜๋ฅธ๋‹ค +๋ฐ•ํฌ +๊ณ ์ถ”์žฅ +๋‹ค๊ณ ์š” +๋ชจ๊ฑด +ํ—ˆ์ˆ  +๋งž์„  +์Šคํ‚ฌ +๊ณ ์˜ +๋งคํ˜น +๋งŒํšŒ +์ผ€์ด๋ธ”์นด +##์ฟ ๋ฅด +๋ฐ•๋ชจ +๋‚˜๋ญ‡๊ฐ€์ง€ +์‹œ๊ตฐ +์•ˆ์ „์‚ฌ๊ณ  +์ข…์‹ +ํŠน๋ณ„์‹œ +์˜๋† +๊ฑด๊ตญ๋Œ€ +๊ทธ๋ฆฌ์Šค๋„๊ต +์ž์‚ฐ๊ด€๋ฆฌ +๋ฐ”๋€๋‹ค +๋‚™์ง€ +ํŒจ๋“œ +์•ˆ์–‘์‹œ +๋‡Œ์กธ +๋ฐ”ํ +์—ฐ์ฐจ +๋‹ฌํ•ด +์ œ๋ฐ˜ +์šธ๋ฆผ +ํ•€ํ…Œํฌ +์•„ํด +La +์•Œ๊ณ  +##eth +๋ถ„๋ณ„ +๋งคํ™” +์„ ์šฐ +์›๋” +์›์Šค +์ด๋ฒ” +์ˆฑํ•œ +๋‹จ์ข… +##ull +์‚ฌ๊ต +์ฐฌ๋ž€ +์„ ๋ด‰ +์—ฌ์ค‘ +์ž”์†Œ๋ฆฌ +ํฌ๊ณก +๋ฏธ์•ฝ +##๊น€์น˜ +๊ณต๋ฒ” +๋ฉ€ํ‹ฐ๋ฏธ๋””์–ด +์„ ์œจ +์‚ฌ๋‹ค๋ฆฌ +##gh +๊น€ํ•ด์‹œ +์„œ๊ฐ•๋Œ€ +๊ทผ์‚ฌ +์•„๊ปด +๋ฒ”ํ•˜ +๊ฑด๋“œ๋ฆฌ +๋Š๋ฆฐ +์‹ ํ˜• +์–‘๋™ +์—์ด์ „ +ํ•œ๊ฒฐ๊ฐ™์ด +์„๊ฐ€ +์ˆ˜์‚ฌ๊ด€ +์—‡๊ฐˆ๋ฆฌ +์ƒ์„ธํžˆ +ํ™”๋ˆ +์ŠคํŠธ๋ ˆ +์นญํ•˜ +Coun +๋Œ์ด์ผœ +์žฅ์‹  +์ž์ด์–ธ์ธ  +##์šฉ์ˆ˜ +๋ฐ€๋Ÿฌ +์• ํ˜ธ +ใ†๊ณ  +์ˆœ๊ต +๊ฐ•๋Œ€๊ตญ +์—ฐ์žฅ์„  +๊ฐ•ํƒ€ +์œผ๋œธ +ํ† ๋ก ํ†  +๋Š๋ผ๊ณ  +์œค๊ธฐ +๋ฐฐ์‹ฌ +์˜ฅ์Šค +์ค„์–ด๋“ ๋‹ค +์ง“๋ฐŸ +์šฐ๋ผ๋Š„ +์ œ๋‹ˆ +๋Œ๋ ค๋ฐ› +์ด๋”ฐ๊ธˆ +๋…์ˆ˜๋ฆฌ +๋™๋งฅ +๊ธฐํ•˜ํ•™ +SOC +๊ฑฐ์ฃผ์ž +๋‚œ๊ด€ +์—ฐ์ค€ +์ฃผ๋ฆฝ +๊ตฌ๋งค์ž +์ค‘๋‚จ๋ฏธ +์†Ÿ์•„ +##์‹คํžˆ +์œ ์ฐฉ +์ƒ์พŒ +๋’คํŽธ +##๋Œ€์‚ฌ +##๋ฒ„๋ฆฐ +๋๋‚ธ +์žฅ์ด +Ge +1927 +๋นŒ๋ณด๋“œ +์‚ฌ์ˆ˜ +๋ท”ํŽ˜ +์ฒœ์ƒ +์‚ดํฌ +๋ฌด์ง€๊ฐœ +์ฐธ์‹  +์†Œํฌ๋ผํ…Œ์Šค +์”จํ‹ฐ +๊ธฐ๋ณธ๋ฒ• +์•Œ๊ณ ๋ฆฌ์ฆ˜ +๋‹ค๋ฌธํ™” +์–‘๊ถ +์˜๋‚˜ํƒ€ +##์ด์˜ +##๋ฟŒ๋ฆฌ +ํ”ผ๋ผ๋ฏธ +ํ”ผ๋“œ๋ฐฑ +๋ฒ ์ด์ปค +์˜ค์›” +ํ•œ๋ฌธ +์—ฌํ•˜ +์ฒญ๋ฐ”์ง€ +๋‚˜ํŠธ +์†Œ์†Œ +์ผ์ถ• +1926 +##๊ฒฝ์ฐฐ +์€ํ‰ +์ฒญ๊ฒฐ +##๋”๊ธฐ +์•„์ดํŒŒํฌ +instafood +์‹์ดˆ +๋กœ์…˜ +์ œ๋น„ +์‹ค๋ฌด์ž +์ฆ์ถ• +๊ณต๊ต +์†๊ผฝํžˆ +์‹ค๊ธฐ +##๋‚ฉ๋‹ˆ๋‹ค +์˜๊ตญ์ธ +์˜ˆ๋ฆฌ +์˜ท์ฐจ๋ฆผ +์ˆ˜๋—๋ฌผ +##์บํ”ผํƒˆ +์ˆŸ๊ฐ€๋ฝ +ํ‘๋ฐฑ +๊ทผ๋ž˜ +KAIST +์—ด๋ ฌ +์™ธ์นจ +์–ด๋–ค๊ฐ€ +๋น„๋น”๋ฐฅ +์‘๊ธ‰์‹ค +๋ฌด๋ฆผ +์ฑ…์ž +๋ผ๋–ผ +์™„์„ฑ๋„ +์‹ค๋งˆ๋ฆฌ +์ง€ํ‚ด +๊ณ„์•ฝ์ง +##๋งˆ์Œ +##erm +์„ธ์•ˆ +์‚ฐ์ฑ…๋กœ +๋ถˆ๊ฑฐ์ง€ +์กฐ์‚ฌ๋‹จ +์„ฑ์ ํ‘œ +์ž…์„ฑ +ํ™ฉํ•ด +๋จธ๋ญ‡ +๋ฐ•ํƒœํ™˜ +๋ญ‰์ณ +์ €๊ฒƒ +์ƒ์ˆ˜๋„ +๋ธŒ์ด +์–ด๋ฒ„ +์•ˆํฌ์ • +ํ˜„๋ฌผ +ํš์ผ +๊ท€ํ•˜ +๊ธฐ๋งŒ +๊ด€์ฒญ +๋Œ€์ฒญ +๋“ค๋ฆฐ๋‹ค +ํ‹€๋ ธ +๋‹จ์–ธ +ํ„ฐ๋“ +์„ ์ƒ +ํ•„๊ธฐ +๋ผ์–ด๋“ค +##๋”๋‹ˆ์ฆ˜ +๋„˜์–ด๊ฐˆ +์ง‘์š” +##ํ‚จ์Šค +๋ชจ์ฐจ +##๋งž์ถค +##๋กœ๊ทธ๋žจ +๊ฒธ๋น„ +๋ฒ ๋„ค์น˜์•„ +์›์ž๋กœ +ph +๋‚™์—ฝ +๋‹ค๊ฐ€์™€ +##ins +์šฐ์—ฌ +ํ™”๋‹ต +์ˆ˜ํ•˜ +1907 +์–‘๋‹น +์ฒญ์–‘ +ํ‘œํ–ˆ +์‹ ๋Œ€ +๋ฌธํ™”์› +๋ธŒ๋ผ์ด์–ธ +##์˜์ˆ˜ +์‹ ์˜ +์Šคํ†ก +As +##ix +##๋ฏธ์•„ +์„ฑํ™ฉ +์ธ์ฒ™ +##๋ฒ ๋ฅดํฌ +๋„๊ต +๋ฐœ๊ด‘ +์žฅ์™ธ +ํŽธ์ต +##๊ธฐ๊ธˆ +##enn +##๊ตญ์žฅ +๋ฐœํ•ด +๋™์ด +ํ†ต์šฉ +๊ณ ๋ฒ• +์˜ค๋ฏธ +๊ณ ๋งˆ์›€ +##uk +๋ฉด๋ฐ€ํžˆ +ML +๋‚˜์ง€ +ํŽธํžˆ +๋‡Œ์กธ์ค‘ +ํ”„๋กค +์Ÿ์ด +##์ง€๋ฐฉ +##์ž‘๋ฌผ +์น˜๋‹ซ +์นผ๋‚  +ํ”„๋ฆฐํ„ฐ +๋กœ๊ทธ +ํƒํ•œ +๋นˆ๋ฏผ +๋“ค์–ด์˜จ๋‹ค +๋ฏผ๋ง +##๋งˆ๋ฃจ +์ผ์–ด์„ฐ +๋ฏผ์š” +๋“ค์œผ๋ฉด์„œ +๋Œ€ํ‘œ์ž‘ +Se +๋”ฐ๋ผ์˜ค +๋ผ์˜ค์Šค +์žฌ๋Ÿ‰ +์ฃผ์›Œ +##form +๊นŒ์Šค +์ œ๋•Œ +ํผํŠธ +ํƒ„์„ฑ +์œ ๋ก€ +WTO +์‹ ์†ํžˆ +ํ”Œ๋ผ์ž +์–ด์ดŒ +์ง€์ € +๋ฒ„์ง€๋‹ˆ์•„ +ํ† ๋„ˆ๋จผํŠธ +๋„์Šค +๋ถ„์ถœ +๊ฐ‘์ƒ +##๊ณก์ ˆ +๊ธ€์Ž„์š” +๋ฒ„๋ ค์•ผ +##๋ฐ”ํ€ด 
+๋‚จํ•™์ƒ +##erson +๊ฐ–์ถฐ์•ผ +์„œ์ง„ +##OD +์šด์ „์‚ฌ +์š”๋ฏธ์šฐ๋ฆฌ +ํ•ด์ปค +Rob +ํ›„๋ณด์ง€ +๋น ์ง +Qu +##ative +๋์—ˆ +๋ฌด์†Œ +##๋ž˜์š” +ํˆฌ์ž๊ฐ€ +์™€์ผ๋“œ +der +์ „๊ฐ€ +##ํ™œ์•ฝ +์ดํ†  +์–‘๋„์„ธ +์ถœํ†  +๋‹จ๋ฉด +ํ‡ดํ–‰ +Sm +๋ถ€์„ค +1700 +์ฒ™ํ•˜ +๋ธ”๋ฆฌ +๋†์‹ฌ +๋ฉ”๋‰ดํŒ +๊ทธ๋ฅด +๋”œ๋ ˆ๋งˆ +๋‹ค์ ธ +##๊ป˜๋ผ +๊ฐ€๋‹ค๋“ฌ +์ด๊ทผ +๋™์Œ์ด์˜ +๋ฌธ์–ด +๋ชธ๊ฐ’ +##๊ทธ๋งˆ +์ œ์ฒ ์†Œ +์–ด๋ถ€ +์ˆ˜์ˆ˜๊ป˜๋ผ +##์˜ํ™” +๋„˜์–ด์ง€ +์–ด๋”œ +๋ฉˆ์ท„ +์ด์‚ฌ +ํ•˜์ดํŠธ +CR +๋ฌด๋ฅด +๊ธฐ๋‚ด +๊ฑด๋ฐ์š” +๋”ฐ๋‚ด +๋•Œ๋ก  +์•ˆ์‚ฐ์‹œ +ํ•˜๋ฅ˜ +ํ˜ธํ™” +๋‹ค๋Ÿ‰ +์ฝ”ํŒ… +๊ฐ•๋ฏผ +์ด์ฒ  +ํ•จ์„ฑ +์Šคํ… +๋‹ด๊ฒผ +์ €๊ฒŒ +๋”œ๋Ÿฌ +๊ณต๋ฏผ +ํŒŒ์ดํ„ฐ +์‹ ๋ฐ +์‹ค์† +์‹œ์™ธ +๊ตญ์ œ๊ธฐ๊ตฌ +๋ฐœ๊ธฐ +๋ง๋” +์ฃ„์ฑ…๊ฐ +๊น€์‹  +##์ฆˆ์Œ +##๋”๋ฏธ +์˜ˆ์‹œ +๋‹ค๋ณ€ +๊ณต๊ถŒ๋ ฅ +๋งˆ์šฐ์Šค +ํŒจ๋Ÿฌ๋”” +์˜ฌ๋ฐ”๋ฅด +##๋„˜๊ธฐ +์ฒซํ•ด +ํ”ผํ–ˆ +๊ฐ€๋ถ€์žฅ +๋‚จ์› +์Šนํ™” +##ํํ•ฉ +##ky +์šฐ์—ฌ๊ณก์ ˆ +์œผ์‹ค +๋ชจ์˜๊ณ ์‚ฌ +๊ณจ์ ˆ +๋ฑ…ํ‚น +1923 +๋ŒํŒŒ๊ตฌ +๋ณด์ • +ํ˜ผ๋‹ค +##์–ด๋‚ธ +์‹œ์•ˆ +์ทจ์—…๋ฅ  +ํ•œ๊ตญ์‚ฌ +๊ธฐ๋ฌ˜ +๊ทผ๊นŒ +๋‘๋“ค +๋ชฐ์•„๋„ฃ +๋งนํ™œ์•ฝ +##land +์œ„๊ธฐ๊ฐ +์–ฝ๋งค +์ง‘๋‹ˆ๋‹ค +์ถ˜ํ–ฅ +ํ˜„๋Œ€์บํ”ผํƒˆ +์กฐ๊ต +์•„๋ž‘๊ณณ +์žฅํฅ +๊ฒฝ์พŒ +ํŽธ๋ฒ• +์• ์™„ +์ชผ๋” +๊น€์‹œ +##๋„์ „ +์‚ฐ๋ชจ +์ •๋ฒŒ +##๋ธ”๋กœ +์ดˆ๊ต +์ดˆ์„  +ํ˜•๊ตญ +๋ฉ€๋ฆฌ์„œ +๋’ค๋”ฐ๋ผ +๋– ๋‚œ๋‹ค +์„ ํ˜• +์ œ์Šค +ํŒจ๋”ฉ +๋ชฐ๋ผ์„œ +๋งค๊ธฐ +##์ฒด์œก +์ฒญ์†Œ๊ธฐ +์Šคํ…Œ์ด์…˜ +๋๋ƒˆ +๋ผ์ณค +๋”ฐ์Šค +๋ชธ๋ถ€๋ฆผ +๊ด€๋Œ€ +ํŒŒ๋ž€์ƒ‰ +๊ณตํ•ด +Bar +์†Œ์žฌ์ง€ +์„ํ•™ +๋ฏธ์„ฑ๋…„ +ํŒŒ๊ธฐ +์–ด์ฉ์ง€ +ํ„ฐ๋ณด +์˜ฎ๊ธธ +๋ฐ๋ ค๋‹ค +๋ฏธ์…ธ +##์–ด๋ณด +์†Œ๋ฆ„ +์ƒ๊ฒฝ +ํ•ด์ ธ์„œ +๋ฏธ์Šคํ„ฐ๋ฆฌ +๋ณต๋ฆฌ +์˜ค์Šนํ™˜ +์–ด๋ฒ„์ด +์ „๋™์ฐจ +ํ…Œ์ผ๋Ÿฌ +IBK +์„ธ์ • +##๊ธฐ๋… +์ค‘์ข… +์•„์šธ๋ › +๋ฐฑํ˜ˆ +๊ฐ‘์ž‘์Šค๋Ÿฐ +๋ฐœ๋ž„ +์งˆ๋Ÿฌ +๋ฌธํ™”์žฌ์ฒญ +์•ˆํ† ๋‹ˆ +์ด์™• +์„ ์ž… +๊ฒฝ์ • +๋’ท๋ชจ์Šต +์ด๋ฃจ์–ด์ ธ์•ผ +We +์ž๋ช… +๋‹ˆ๋ผ +์ผ€์ธ +ํ”„๋ ˆ์  ํ…Œ์ด์…˜ +์ „์Ÿํ„ฐ +์„ธ๊ฐ„ +์•จ๋Ÿฐ +๋น„์›Œ +์ธ์‚ฌ๋ง +CI +๊ตฐ์ž +์ ˆ์ถฉ +๊ฐ€์˜ฅ +##์ฐจ๊ธฐ +##โ €โ €โ €โ €โ €โ €โ €โ € +๋ฐ•ํžŒ +##์ง€๋ฆฌ์•„ +๋‚˜์ด์ง€๋ฆฌ์•„ +์†Œ๋ น +๋ ค๋‹ˆ +์„œ์˜ˆ +์˜์›” +๋ฌต์ง +KG +##nal +๋งค๋„ˆ +์ฃผ์‹ฌ +์›๋ฃธ +550 +๋‹จ์กฐ +ํ•™๋น„ +๋น„๋ฒผ +์…”ํ‹€ +์œ ์†Œ๋…„ +์ด์ฒœ +์ž…์žฅ๊ถŒ +๊ณ๋“ค์—ฌ +ํ•œ๋ฆผ +์ „ํญ +##์‹œํ—˜ +๋‹›์‚ฐ +tr +๋Šฅ์ˆ™ +๋™์œ ๋Ÿฝ +ํ•ฉ์นœ +๊ตฌ์ง +๊ธฐ์ค€์น˜ +##11 +MW +๊ณ ๊ตญ +๊ณจ๊ฒฉ +๋‹ฌ๋ ฅ +์˜ค๋ฉ” +๊ณ„์—„ +ํŒ์ด‰ +ํŠธ์œ— +๋ฉ๋‹ฌ +์—๋””์…˜ +์„ฑ์ˆ˜๊ธฐ +์„ธ์›Œ์กŒ +๊ฐ์ฒด +๋งˆ๋ฆฐ +์ŠคํŠœ์–ด +์—ฐ์˜ˆ๊ณ„ +์—๋ฒ„๋žœ๋“œ +ํ„ฐ์งˆ +๋„ˆ์Šค +๋ž˜์š” +์—ฐ์ˆ˜์› +๊พธ๋ฆฌ +์ •์šด +##๋ฐ”๋ฅด +๊ณ ๊ฐˆ +์‹ ๊ฒฝ์ „ +์ •์ˆ˜๊ธฐ +##์‚ฌ๊ธฐ +๋ผ๊ณค +๋นˆํ‹ˆ +๊ณต์Šต +๋นผ๋‚ด +์‹ค๋ ธ +์ฐฌ๋ฌผ +๊ฐ๊ธˆ +์œ ๋ฆฐ +์šฐ์ฃผ์„  +ํ•˜๊ตฌ +NY +ํ‘œ๋ฅ˜ +๋„์ฟ  +์„ญ์„ญ +Su +๋‹ด์–‘ +ํŽœ์…˜ +that +๋ฏธ๋„ค๋ž„ +##์ต์Šค +์งˆ์ฑ… +##์˜ค๋“œ +๋ณด์•ˆ๋ฒ• +ํŒŒํŽธ +์—ฐ๋‹ฌ +์žฌ๊ฒ€ํ†  +์–ด์ฐŒ๋‚˜ +Cent +##๋ฌด์žฅ +ํ…Œ๋Ÿฌ๋ฆฌ +ํ”Œ๋ ˆ +์‚ฌ๊ณ ๋ ฅ +์‚ฌ๋ถ€ +๋™์ผ์‹œ +๋ถ„์‹ค +Will +ํ˜ธ์ถœ +์ข‹์•„ํ–ˆ +ml +##๋ฏผํ„ด +๋งŒ๋…„ +ํ—ค๊ฒ” +์ดํšŒ +1922 +์ง‘๊ฒฐ +๋ชจ์ฐจ๋ฅดํŠธ +์‹ ์‹œ +์ฟ ๋ฆฌ +๊ฐ€๊นŒ์Šค๋กœ +์ž์„œ์ „ +๋ถ€๋”” +๋ฉ๋‹ˆ๊นŒ +๊ฝƒ์žŽ +๋งˆ์šด +๋‘๋ฆฌ +์—„๋‘ +์‹์šฉ +์•ž๋‹น๊ฒจ +๊ถŒ์œ„์ฃผ์˜ +์Šˆ์ฆˆ +๊ฐ”์—ˆ +๊ธ€๋ฆฌ +์ฐฝ๊ฐ€ +ํ†ตํํ•ฉ +๋…๋ณด +๋ฏธ๋”” +๊ฐ•์ •ํ˜ธ +ํš์ • +์œŒ์Šจ +ํ™”ํ•ฉ๋ฌผ +๋‚™์ธ +๋Œ€์„œ +๋ฐ˜๊ตฐ +์Šค๋ฆด๋Ÿฌ +ํฌ๋ ˆ์ธ +๋„๋กœ๊ณต์‚ฌ +๋นผ์•—๊ธฐ +์นผ๊ตญ์ˆ˜ +๊ตญ์ต +1921 +์ถฉ์ฒญ๋ถ๋„ +##์—๋ฅธ +์ด์ •ํ˜„ +์‚ญ์Šค +์ค€๋‹ค๋ฉด +NGO +์ •์˜ค +์•„๊ตฐ +๋ฐ˜์—ญ +๋น„์•ฝ +์บ๋ฆฌ +๊ณ ์› +ํ† ์˜ +ใ†์ผ +์ด๋‚จ +์™ธํ•  +##ley +๋‚˜์œ„ +์ง€ํ‚จ +๋“ค์–ด๋„ +๋™์„  +๋ฐค๋‚ฎ +๊ฒฐ๋ณ„ +##air +์นœ์„  +๊ธฐ๋ณ‘ +์‚ฌ๊ทน +ํผํŽ˜์ด +For +๋‚ด๊ฑด +๋ฐ€๋ผ๋…ธ +๋“€์–ผ +๋‹น๋ก  +์‹œ๋„ค๋งˆ +์˜ฌ๋ผ์„œ +๊น€๋ฒ” +์ด๋žฌ +ํƒˆ์„ธ +ํŽ˜์ธํŠธ +ํ•™์„ค +๊ตญ๋ฏผ๋‹น +ํ”„๋กœ์„ธ์„œ +๊ฐ‘์ž‘์Šค๋Ÿฝ +ne +##์ฐจ๊ทผ +์ €์Šน +๋ฐด์ฟ ๋ฒ„ 
+์ฐจ๊ทผ์ฐจ๊ทผ +ํžŒ๋‘ +์ƒํ™ฉ์‹ค +๊ฒฝ๋ฅœ +๋™์ž +์ฒ˜ํ–ˆ +ํ™‹์นด์ด๋„ +์นœํ•˜ +##๋ฏผ์ฃผ๋‹น +์ง๊ธ‰ +๋ชฉ์žฅ +์ถ”์›” +๋ถ€์ง€๋Ÿฐํžˆ +์†๋…€ +์˜ˆ๊ธฐ +์‚ฌํˆฌ๋ฆฌ +ํ๋ฅธ +ํ˜น์‹œ๋‚˜ +์›Œํฌ์•„์›ƒ +๊ทน์ฐฌ +์–‘์ฒœ +๊ด‘ํ•ด๊ตฐ +ํŽ˜๋„ํ‹ฐ +๋ธ”๋ฃธ +๋†”๋‘ +##๋‚˜๊ธฐ +##ital +์ž…์žฅ๋ฃŒ +์ƒˆ๊ฒจ์ ธ +์œ ๊ฒฉ์ˆ˜ +์šฐ์ฆˆ๋ฒ ํ‚ค์Šคํƒ„ +LIG +๊ฐœ์  +๋ฒˆ๋ฒˆ +##ven +์ถœ์‚ฌ +์žฅ๋™ +##์˜ˆํ”„ +##์ƒ๊ฐ€ +์•„๋ฏธ๋…ธ +NLL +์Šˆํผ์Šคํƒ€ +์ž๊ฐˆ +์ฝ˜ํผ๋Ÿฐ์Šค +๋ˆ„๊ตฐ์ง€ +๋Œ€๋ฏธ +ใ…‡ใ…‡ +์น ๊ณก +ํŒŒํƒ„ +๋ฌดํ•œ๋„์ „ +์ถœ๋ฃจ +์ง€์ผœ๋ณธ +ํŽผ์ณ์กŒ +์‡ผํŠธํŠธ๋ž™ +๋™๋ฌด +์‚ฌ์šฉ๋ฃŒ +์„ฑ์ˆ˜ +##ํ”Œ๋Ÿฌ +๋””๋ฐ” +์กฐ์ œ +ํ•จ์ถ• +์ดํƒœ์› +์ธํŒŒ +##๋‰ด์Šค +๋ฐ”๊นฅ์ชฝ +๋•๋ชฉ +๋ฌผ๋“ค +๊น€๋„ +์ƒ์ˆ˜ +์„ ์˜ +๋Š๋‹ท +๊ณ ๋‹จ +๋กœ์šธ +์ •๊ธ€ +์„ ์–ธ๋ฌธ +ํšŒ์‚ฌ์› +์œค์ข… +๋ˆŒ๋ € +๋ฐํ˜€์ง„ +ํ”ผ๋”” +Go +ํƒœํ™” +๊ธฐ์Šน +๋ฌผ๊ธฐ +ํ›”์น˜ +์•„๋กœ +๊ด€๊ด‘๊ณต์‚ฌ +์ค€์šฐ์Šน +์›ƒ๋Œ +ํ›„์›์ž +##ํƒœํ‰ +์‚ดํˆ +##์‹ญ๋‹ˆ๋‹ค +์ฑ„์ฐ +์ณ์„œ +1917 +์ „๋ฆฝ +๋ถ„ํ–ฅ +##inc +##๊ด€๋ฆฌ์œ„์›ํšŒ +๋ฏธ๋ฅต +์—ฐ์œ  +ํ”ผ์นด +##CI +##์›ƒ์Œ +##๊ตํšŒ +์žฅ์„ธ +OK +์ง€๋‚ผ +์–ฝํ˜€ +๋Š์Šจ +์—ฌ์ข… +๋ญ˜๊นŒ +TK +ํž˜๊ฒน +650 +๊ฐœ๋™ +##๋‹ˆ์ง€ +๋’ค์ฒ˜ +์••๋ฅ˜ +๋‹ค๋ฌธํ™”๊ฐ€์ • +๊ฒฝ๋ณต๊ถ +๋ง๊ณ  +๊ธฐ๊ณ  +์นดํ”„ +๋‹ˆ์‹œ +์ฃ„์ˆ˜ +ํ˜„์ €ํžˆ +๊ต์™ธ +๊ณ ์†Œ๋“ +์ฒจ์˜ˆ +์œ„ํฌ +๋ฃจ๋งˆ๋‹ˆ์•„ +์ด๋ž˜์„œ +##ps +๋งˆ์Šค +ํ”ผ์›Œ +๋จธ์ง€ +๊ฒฌ๋ณธ +ํ•ธ๋“ค +์น˜์•ฝ +ํŽœ์Šค +์ •ํƒœ +์Šค๋ฉฐ๋“ค +์–ด์ ฏ +์ฐฉ์•ˆ +Ass +์“ฐ๋Ÿฌ์กŒ +์•„์Šฌ +ํ™œ์ฃผ +ํš๊ธฐ์ ์œผ๋กœ +๋‚˜๊ฒฝ +์˜์œ  +ํ…Œ๋ฅด +์ด์›” +์œ ๋ง์ฃผ +๋‚ด๊ตญ +๊น€๋ณด +๋„ํ™” +์ฒญ๋ผ +ํ”„๋กœ๊ทธ๋ž˜๋ฐ +ํ”„๋ผ์ž„ +์–ด์ ฏ๋ฐค +๋ถˆ์˜ +๋Œ€์นญ +์ฑ„์šด +์˜ˆ์ ˆ +์šด์šด +ํ…Œํฌ๋†€๋กœ์ง€ +BI +๋น„๋‘˜๊ธฐ +๋ณธ์ฒด +์„ ๊ฑฐ๊ด€๋ฆฌ์œ„์›ํšŒ +์œ„ํ‚ค +##ํšŒ๊ณ„ +๊ฒฉํˆฌ +##ience +์‚ผ๊ฐํ˜• +๋ฒ„ํ…จ +##๋ชจ์ง +๋ฆฌ๋ฒ„ํ’€ +์ผํ’ˆ +ํˆฌ์‚ฌ +๊ตฌ๊ฐ• +๋ฃจํ”„ +๊ท€๊ฐ€ +๊ฐ„๋”” +ํ›ˆํ›ˆ +์กฐ์ง€์•„ +๊น€์˜๋ž€ +์นด์šดํŠธ +ํƒ€๊ตฌ +๋ ˆ๋ฐ” +##TN +ํ‰ํ–‰ +๋ถˆ๋ฆด +์›ํ•œ๋‹ค๋ฉด +๊ตด์š• +๋ฐฐ๋“œ๋ฏผํ„ด +์ •์ข… +์•ž๋‚  +์•ž์„ธ์šด +๊ฒฝ์น˜ +ํ•ญ์ƒ์ œ +์ˆ˜์ž‘ +๋ผ๋˜ +์˜ฌ๋“œ +ํŒจํ–ˆ +์œจ๋ฒ• +##BI +##์ดˆ๋ฆฌ +์ถฉ์‹คํ•˜ +์นœ์ผํŒŒ +์–‘์Šน +์‹ฌ๋„ +๊ณผ๋ฐ˜์ˆ˜ +##๋‚ ๋‘ +๊ทผ๋ ฅ +๊ธธ๋ชฉ +์ œ์ผ๋ชจ์ง +์ฒด์ธ์ง€ +๊ฒฐ์† +๊ณ ๋ น์ž +##๋‹์›€ +์ง๊ฒฉ +๋‹ด๋ฑƒ +๋กœํŽŒ +์Šค์ฝง +##๊ทธ๋ฆ‡ +ํ˜ธ๋‚ ๋‘ +์žฌ์™ธ +๋ฐœ๋‹์›€ +์ฐฝ์‹œ +๋น„์ฃผ +์‚ฌ์˜ต๋‹ˆ๋‹ค +##์ฃผ๊ณก +ํ•ด์ˆ˜๋ถ€ +๋‚จ๊ฒฝํ•„ +๊ธฐ์™€ +์นด๋ฆฌ +๋ถ„๊ธฐ์  +๊ธฐ๋ก๋ฌผ +๋ฐ์นด +##๊ฑฐ์Šค +์˜ค๋”” +์ฒญ์ •๊ธฐ +๋‹ด๊ทธ +์ฑ„ํŒ… +๊ทผ๋ฆฐ +์˜์•” +๊ฐ€์žฅ์ž๋ฆฌ +๋‚ด๋ ค๋‹ค +##๋ถ€์กฑ +๋‹น์ • +์‚ฌ์ƒ๊ฐ€ +์ตœํ•˜ +์นœ๋‹ค +์ง€์œผ +##ํŒŒ์—… +์šธ์ฃผ๊ตฐ +##ics +์ฑ„์›  +org +๊ฟˆํ‹€ +๋ฌด๋ฆ… +๋ถ€๋ฅด์ฃผ์•„ +์˜ค๋งˆ์ด +ํ์ˆ˜ +1908 +##๋ชจ๋ฆฌ +๋„๊ฐ€ +์ œํŠธ +๊ฒŒ์„ +ํ™ฉ์†Œ +ํ•œ๋ฐ”ํƒ• +ํ‰ํ„ฐ +๊ฐํ•˜ +๊ฐ€์ถœ +์ธ์นœ +ํšกํฌ +ํ™ฉํ +์‚ฌ์‹œ +cat +์ž์•„๋ƒˆ +๋‹ค๋„ค +์ˆ˜์–ต +##ํ•ด์กŒ +๋ฐ€ํฌ +์‚ฌ์ด์–ธ์Šค +๊ฒฝ์˜ +๋ฐฐํ‹€ +์‹ ๋‹ค๋ฉด +์ €๋ณ€ +๋ฐ•๋ณ‘ํ˜ธ +์™„์ˆ˜ +์ ˆ๊ฐœ +ํƒ„์‹ +ํž˜์จ +์ €๋„๋ฆฌ์ฆ˜ +ํ‹€์–ด +Med +๊ต์ „ +๊ฐœ๋ช… +๊ฑด๋„จ +๋ฌด๊ณ  +์ผํ„ฐ +์ถœ๊ฐ€ +์„œ์Šด +ํƒˆ์ทจ +And +##ondon +์š”๋Ÿฐ +๋‹ค์ณค +ํ™•์—ฐํžˆ +ํฌ์ผ“๋ชฌ +์„œ๋Š˜ +ํฌ์‹ +ํž˜๊ป +๊ณ ์„ +ํ๋ง‰ +์‚ฌ์ƒ‰ +๋ฌธํ•™์‚ฌ +์‚ฐ๊ณจ +์ด๋ฐฉ์ธ +๋ฐ€์–ด๋ถ™์ด +ํŠน๊ฐ€ +๋‹ค๊ฐ€๊ฐ” +์ ํผ +์ดํŒŒ์—… +##๋ถ€๋ฆฌ +๋ชฝ๊ณ  +##์ต์Šคํ”ผ์–ด +CT +ํ‰์ค€ +์‹ซ์–ดํ•˜ +๋‚™ํƒœ +์นดํ†ก +๋ธ”๋ž™๋ฆฌ์ŠคํŠธ +๋‚ด๊ณผ +##ization +##oon +๋‚˜๋“ค์ด +์…ฐ์ต์Šคํ”ผ์–ด +์†Œ์ด +ํœด๋จผ +##์ œ๋‹น +๊ตฌํƒœ +๊ณ ํ’ˆ์งˆ +์‹ ํ•™์ž +๋‹ค์„ฏ์งธ +์œ ๋ชฉ +net +๊ธฐ์ž์žฌ +์‚ฌ์  +๋ฑƒ์† +ํŒŒ์—ด +์ˆ˜์ธ +์ฐฝํ”ผ +๊ฐ€๋”” +์—ด๊ฑฐ +์„ค๊ณ„์‚ฌ +๊ท€์—ฌ์›Œ +์ ˆ์นœ +##๋งŒ๊ธˆ +์˜†๊ตฌ๋ฆฌ +์„ญ์™ธ +์ดํ† ๋ก +๋‚ญ๋… +๊ฐœํ‘œ +์ „๋ฌธ์ง +์œ ๋žŒ +์ง€์ผœ๋ด์•ผ +๋ณ€ํ•  +์–ผํ• +๋งˆ๋Š” +๋Œ๋ฆด +๋น„์—”๋‚ ๋ ˆ +๋ฆฌํŠฌ +๋ช…์žฅ +์บ์Šฌ +๋ถ€์—ฌ๋ฐ› +์ข‹์•„ํ•œ๋‹ค +๋ถ€ํ”ผ +ํ์•” +ํšŒ์˜์žฅ +์ง€์˜ค 
[pretrain/tokenizer/vocab.txt: 32,500 added lines — the WordPiece vocabulary shipped with the tokenizer: Korean word tokens, ##-prefixed subword continuations, and ASCII entries such as +London, +NASA, +IPTV, +CGV, +##tics, +https, +selfie]
+ํ•œ๊ณ„์„  +๊ธฐ์ „ +์š”ํฌ +ํ•˜ํ‚ค +265 +๊ฒŒ์œผ๋ฆ„ +JS +๊ธฐ๊ด€์ด +์กฐ์ง„ +์Šฌ๋กœ๋ฐ”ํ‚ค์•„ +์šฉ๋ชจ +##์ œํ•œ +์กฐ๋ฏธ๋ฃŒ +๊ธ‰์„ ๋ฌด +ํ™”์ด +๊ฐ€๋ฅด์‹œ์•„ +๋ถํ•ญ +์ธ๋„์ธ +๋ฒ„์ “ +##na +๋ฆฌ๋‹ˆ +๋งค๊ฐœ์ฒด +##๋ผ์ด์–ธํŠธ +๊ฐ•๋ณ‘ +์ ์™ธ์„  +ํžˆ๋ฉ” +๊ต๊ด€ +์ ˆ์ƒ +์ •์ˆœ +๋Œ€์–‘ +๋‚ด๋ง‰ +๋ฏผ๋…ธ์ด +๋ถ€์› +๋“ฑ๋ถ€ +๊ตณ๊ฑดํžˆ +##ju +๊ตฐ๋ฐ๊ตฐ๋ฐ +์„ค์ „ +๋ง์‚ด +๊ฐ€๋ธŒ๋ฆฌ์—˜ +์ด์„œ +๋กœ๋ฒ ๋ฅด +##๋ถ€๋“ค +๊ณ ์ณค +์ค€์šฉ +ํด๋ผ์ด์–ธํŠธ +ํ•™์—ฐ +๋ฐฐ๋ถ€๋ฅด +์‹ ํƒœ์šฉ +์Œ์ง +##๋ผ์ด์ฆˆ +##๋ชจ๋‹‰ +๋ฒ ์ด์ง€ +๋‹ฌ๋ผ์„œ +์™ธ์•ผ +์ฑ„์ง‘ +##์„ ์–ธ +ํ”Œ๋ฃจํ†  +๋ถ€ํ™” +์˜ฌ๊ฐ€ +ํ‹ˆํƒ€ +ํ™ฉ์„ฑ +์•ค๋“œ๋ฃจ +ํ†ต๋‚˜๋ฌด +๋ฌถ์Œ +๋‹์Šจ +ํ• ์ฆ +์กฐ๊ฐ์ƒ +๋ณธ์˜ +๋ฐ˜๋Œ€ํŒŒ +์‚ฌ์ด๋จผ +RP +Science +๋ชฐ์•„๊ฐ€ +##ํดํŠธ +์Ÿ์˜ +์‹œ๊ฐ„ํ‘œ +ํŒจ๊ธฐ +##ury +ํ…Œ๋งˆ์ฃผ +๋ฌด๊ฐœ +๊ฒฝ๋™ +๋ฉด์ง +##72 +์• ๋“œ +์งˆ๊นŒ +##๋”๋ผ +Tel +๋ด‰ํ•ด์กŒ +๋ถ€์žฃ์ง‘ +์‹œ๋…€ +์Œ“์˜€ +์—ฐํ•ฉ๊ตญ +์ฒซ๋ˆˆ +๋‹ฌ์•„์˜ค๋ฅด +ํŠธ๋ ˆ์ธ +๋‚œ๋กœ +์„ฑ๋ƒฅ +์•กํ™” +์›์ฃผ์‹œ +ํ•ด์•ˆ์„  +์นญ๊ธฐ์ฆˆ +๊ฐ์น ๋ง› +๋ฐ˜๋ž€๊ตฐ +๋™์ฃผ +์‚ฌ๋ฅดํŠธ๋ฅด +์†ก์˜๊ธธ +##ica +ํ๋ ˆ์ดํ„ฐ +ํ‰ํƒ„ +์ทจ์•ฝ์  +๋‹ค๋ฉด์„œ์š” +์šฉ์˜ +212 +์‚ผ์œ„ +๋ฉด์‚ฌ +##์ด๋ฉด +ํ˜‘๊ณก +๋ถ€๋ฅดํฌ +์ด๊ฑธ๋กœ +์‡ผ๊ตฐ +##์ฃผ๋‹ˆ์–ด +ํ–ฅ๋ฐฉ +๋ฒ ๋ฅด๋”” +๋Š˜์–ด์ง€ +๋ฏผ๋“ค๋ ˆ +์˜จ์ข…์ผ +์ž„์ž +๋˜์‚ด์•„๋‚˜ +์˜จ์ˆ˜ +์˜ค๋ž˜๊ฐ€ +์„ ๋ถˆ +์‹ค๊ถŒ +๋ชจ์ž๋ž€ +์ €๊ธฐ์•• +๋ฐ•์ฐฝ +##์ฑ„๊ธฐ +233 +ํ›„์ง„๊ตญ +๋ฆฌํผ +๊ตฌ์ถ•ํ•จ +๊ตฌ๊ฑธ +์™„ํŒจ +๋ถ€๋Ÿฌ์›€ +์ˆ˜์‚ฌํ•™ +์ผ๊นจ์šฐ +๋ง˜๋Œ€๋กœ +๋˜๋Œ๋ฆฌ +์ทจ๋Ÿ‰ +๋– ์˜ฌ๋ฆด +์ˆ˜์ฆ๊ธฐ +์กธ์† +fe +๊ฐ€์ฒœ +์ •๊ฐ +์œ ๋… +๊ธ‰์ƒ์Šน +๋ณ‘๋ฆฌ +##๋‹น์ดˆ +ํƒ€๊ณ„ +์—ฐ๊ทน์ œ +๊ธฐ๊ณต์‹ +์กŒ์œผ๋‚˜ +ํ‘ธ๋… +์‚ผ์„ฑ์ „๊ธฐ +์ˆ™์ธ +์•„๋žŒ +์ฒญ๋ ด๋„ +์ง„๋“œ๊ธฐ +์€๋ฐ€ํžˆ +๋ผ์ด๋„ˆ +๋„์šฉ +211 +๊ฒฝ๋ฌด +##ํ† ์‹œ +ํ•„ํ•˜๋ชจ๋‹‰ +๋Œ€๋งˆ๋„ +us +๋ณ‘์กฐ +๋งž๋ฌผ๋ฆฌ +##๋ณด์Šค +๋ ˆํผํ† ๋ฆฌ +Act +##82 +DGB +์บ”์ž์Šค +ํ—๋ฆฌ +๊นœ๋ฐ• +##97 +๋‰˜์•™์Šค +๋ฐœํŠธ +์‹œ๋ฐœ์  +๊ด‘์ง„๊ตฌ +์ „๋ฉธ +##ํ˜ธํ…Œ +๊ฐ์‚ฌ๊ด€ +๋‚œ๋‹ค๋Š” +๋ชธ์ฒด +๋ฌ˜๋น„ +์—ฐ์ง€ +##์Šคํ† ํฌ +Fe +๋Œ€๋œธ +##๋ณด๊ฐ +๋ณด์‹œ +##์ •์‹  +์—„์ง€์†๊ฐ€๋ฝ +sm +๋ถ€์—ญ +ํ•ด๊ธˆ +##๊ณต์ฆ +๋น„๊ธฐ +์—ญ์  +์žฌ๊ทœ์–ด +ํ•œ๋‚จ +##eck +##์–ธ์ • +๊น€์ •๋‚จ +##๋ฐ˜์ง +๊ตฌ์†๋ ฅ +๋ชฐ๋ฆด +์Šคํ… +ํ…Œํ—ค +##92 +์ฝง๋ฌผ +๊ทธ๋žฌ์—ˆ +์‹ธ๊ตฌ๋ ค +์ผ๋™ +๋’ค์ด +ํ•์ค„ +##ํ‚จ์Šจ +๋„๋„› +๋Œ€์‚ฐ +์ˆ˜์žฌ +Te +ํดํฌ์Šค +ํ”„๋ž€์ฒด +์ด์ถœ๋™ +๋‹ด์‚ฌ +์†์—ฐ์žฌ +##๋œจ๋ฆฐ๋‹ค +๋ถ™์žกํ˜€ +๊ฑฐ์ œ์‹œ +##ino +์‹œ๋ฏธ +์ผ๋ฌธ +์ปจ์…‰ํŠธ +๋ถ€๋“ค๋ถ€๋“ค +๋ฐ˜์ง๋ฐ˜์ง +##chen +๊ฐ์—ผ์ž +ใ†๊ตญ +์„ค์›€ +์ด๊ทผํ˜ธ +๋ ˆ๋ฏธ์ฝ˜ +์ง€๋ƒˆ์œผ๋ฉฐ +์ „์šฐ +์ž…๋ฒ•๋ถ€ +์†Œ๋ฐฉ์ฐจ +๋‚˜๋ฅธ +##81 +๊ธฐ๋ณธ๊ธ‰ +๋“ค์ด๋งˆ +๋ ˆ์ด๋‹ค +๋ช…์ฐฝ +ํ˜•์„ธ +์Šˆํผ์ฃผ๋‹ˆ์–ด +POP +๊ฐ๊ฐ€ +๋ ŒํŠธ +์• ํ™˜ +๋‚ด์„ธ์šธ +๋“œ์…จ +์Šน์ „ +๋‹จ์ผํŒ€ +๊ณจ๋‹ค๊ณต์ฆ +๋ฐ›์น˜ +๋ฐ”๋Š์งˆ +์‰ฌ์›  +๊น€์‹ ์šฑ +๋‚œ์Ÿ์ด +๋ถ€๋ฝ +์ดˆ๋…„ +์‚ผ์œ„์ผ์ฒด +์บ๋Ÿด +time +๊ทธ๋ž€ +๋”ธ๋ฆฐ +์„ฑ์‹œ +Class +์•Œ๋ฐ”๋‹ˆ์•„ +PV +##๋‹ค๋ฆฌ๊ธฐ +์ฐ”๋ € +๋†€๋ผ์šธ +๊ธ์ง€ +##๋š๋š +์ง€๋…€ +์‹๋ฌผ์› +๋นผ์•—๊ฒผ +์„ฑ๋ชจ๋ณ‘์› +๋ฐฑ์ž +##93 +๋น„๊ผฌ +์™ธ๋„ +ํžˆ๋ผ +ํ•ฉ์ˆ™ +ํ™˜๊ด€ +์›ํ™œํžˆ +์†์ฃ„ +##ํŽœํ•˜ +Wil +๊ฒฝ์ถ• +##icy +๋ฐ•ํ—Œ์˜ +๋ฐ”๋น ์„œ +์นด๋„ค๊ธฐ +๊ถŒ๋ฆฌ๊ธˆ +์ž‘์ธ +ํฅ์ • +410 +Tim +์™€์ดํ”„ +์„ ํ—˜ +์—๋ฐ” +์ €์–ด +ํŒ”๋‹ค๋ฆฌ +๋ฌผ์œผ๋ฉด +ํ”ผํ•ด์•ก +ใ†์žฌ +์‘์›๋‹จ +์ „์ž๊ธฐ +์ฃผํ™ฉ +ํ…Œํ—ค๋ž€ +๋‚จ๊ฒจ๋‘” +ํ€„์ปด +ER +๋ผ๋”” +๋งŒ์ˆ˜ +##๋„ค์ž„ +์ค‘์ €๊ฐ€ +ํ•˜์‹œ +##MI +##ington +๋น„๋ฐ”๋žŒ +์—๋””์Šจ +๋„์‹œ๋ฐ” +470 +๊ทธ๋Ÿญ์ €๋Ÿญ +##๋“œ๋ž˜๊ณค +์—ฝ๊ธฐ +์ œ์ •์‹  +์„ ๋™์—ด +๋ธŒ๋ฃจ์Šค +ํ’€ํƒ€์ž„ +##์ƒค๋ฐ” +์•ผ๊ตฌ๋‹จ +XX +๋Œ€๋จธ๋ฆฌ +๊ณผ๋„๊ธฐ +์œ ์˜๋ฏธ +์†๋ฐ• +์ค‘์‚ฌ +์ธ์ฒœ๋Œ€ +๊ด€์•„ +์ง„์‹œํ™ฉ +๋”ฐ์งˆ +##98 +ํŒŒ๊ดด๋ ฅ +๊ท€์ถ” +์‚ฐ๋„ +๋ฌด๋š๋š +์ƒ๋Œ€์‹ +ํ˜ธ์‹œ +ํ˜นํ‰ +##์• ํ”Œ +๋‹ค๋น„ +๋„์ธ +##์นด๋ผ 
+##๋ณ€์†๊ธฐ +์ถฉ์‹  +์„œํˆด +##๋„ค์‹œ +your +์Šน์ƒ +์ž‘์ค‘ +๋ฒ„๋งˆ +ํ›„๋ˆ +์‹ค์— +๋ฐ”๋ผ๋ดค +8500 +์ •์ƒํšŒ์˜ +๊ฑฐ๋Š˜ +๊ทธ๋ฆฌ์›Œํ•˜ +์ž”๋””๋ฐญ +ํ™์„ +๊ตฌ๋ผ +Mac +ํƒœ๋™ +##์ง‘ํ•ฉ +ํ›„๋ฐ˜์ „ +์ˆ˜์ฒœ์–ต +๋ญ‰์นœ +๋ผˆ์•„ +ํ–ฅ์‹  +๋‹ฌ๋ผ์š” +๊ฒฉ์‹ +๋‚œ์ƒ +ํ•˜์ธ๋ฆฌํžˆ +๋ฐ•์‹  +๊นƒ๋“  +๊ทผ๋ฉด +##์‹œ์•„๋“œ +##๊ฐ€ํƒ€ +๋งž์ถฐ์ ธ +์œ ์‚ฐ์†Œ +Mart +##์ž์‹ +์œก๊ฐ +๋…ธ๋ถ€๋‚˜๊ฐ€ +๋„์˜ˆ +๋ง‰์ค‘ +๋ถˆ์ž„ +์ถœ๊ณ ๊ฐ€ +๋ณ€์กฐ +์œ ๋‹ˆ๋ฒ„์‹œ์•„๋“œ +์žฅ๋ฐ‹ +๋ฐฐ๊ณ ํ”” +ํž˜์ผ +โ €โ €โ €โ € +๋ฉ”์‹œ์•„ +์ •๊ตฌ +๋‚ ์น˜๊ธฐ +์ ˆ๊ฒฝ +##๊ป˜์š” +๊ฐˆ๋ผ์ ธ +์น˜๋ฃŒ์‚ฌ +์ค„๋‹ค๋ฆฌ๊ธฐ +๋ฒŒํŒ +์ดํƒœ๋ฆฌ +์„œ์ฒญ +์ด๋น„ +ํ’€๋ฌด์› +ํ˜„์ž +๋†๊ธฐ๊ณ„ +๋ถˆํ•ฉ +์ง€๋™์› +##๋ฌธ์˜ˆ +๊ฑฐ๋ฆฌ๋‚Œ +๋ฐํ˜€๋ƒˆ +1885 +์†Œ๋‹‰ +ํ—ˆ๋ฆฌ๋  +ํ•ฉ๋ฒ•ํ™” +๋ฐ”๋ฆฌ์Šคํƒ€ +##omen +ํƒœํ™”๊ฐ• +์ฒœ์ฐจ +ํŽธํ˜‘ +##์Š๋ฆฌ +๋ฏธ์ˆ ์‚ฌ +๋™ํ™”์ฑ… +##์ƒ…์ด +๊ฑฐ๋Š๋ฆฐ +์ƒ…์ƒ…์ด +##๋‹ค๋‹ +์šธ์ปฅ +ํ›„์‚ฌ +KAI +๋Œ€์—ฌ์„ฏ +์ฃผ๊ฒ€ +์นด๋‹ˆ๋ฐœ +system +๋’ค์—‰ +์‹œํšจ +ํœดํ•™ +์•„๋งŒ +๋‚จ์นœ +๋ฐ์ด๋ธŒ +๋ฝ€๋ฝ€ +๊ทผ์† +๋ฌธ์ค‘ +์—Š๊ทธ +ํ˜ธ์กฑ +##์šฐ์˜Œ +๊ด€์ฐฐ์ž +์ฒœ์ˆ˜ +๊ฐ„๋Œ€ +๊ฐ•๋ฏผํ˜ธ +์šฐ์น˜ +์ˆ˜ํ™” +๊ท€์–‘ +์ „์—ด +ํ•ด์ง +##๋ฌธํ™”ํšŒ๊ด€ +์ฒญ๋Ÿ‰๋ฆฌ +ํˆฌ๊ฒŒ +์–‘๋…์žฅ +์ค‘๋ฌธ +๊ด€๋ฆฌ์‚ฌ +##๋ชจ์ž +๊น€๊ทผ +์„์ขŒ +RF +Red +๋ ˆํ„ฐ +์žฌ๋ ฅ +ISA +์ œํšŒ +ํ”„๋ฆฌํ‚ฅ +์ผ๊ธฐ์žฅ +##ํƒ€๋‹ˆ +์ฝœ๋Ÿผ๋ฒ„์Šค +์šฐ๋ฒ„ +ํ˜„๊ธฐ์ฆ +๊ฑฐ๋งŒ +์…จ์ž–์•„์š” +์นผ๋นˆ +๊ฐ•ํƒœ +ํ•ด์ค˜ +์›๋น„ +์บ๋”” +๋ชฝ๋‘ฅ์ด +์น˜๋Ÿฌ์กŒ +๋ณผํŽœ +์—ฐ์•” +์ „์–ธ +๊ตฐ๊ด€ +##๊ผญ์ง€ +์„ธ์ข…๋ฌธํ™”ํšŒ๊ด€ +PEF +ํŒ๋ณ„ +ํŒฌํด๋Ÿฝ +์œ ํ’ˆ +##๋ฐฉ์œ„ +์ค€๊ณต์‹ +์†Œ์ง€์ž +์ ˆ๊ธฐ +๊ฒฐ์ „ +๊ท€ํ‰์ด +##๋ฆฐํŠธ +์ง€๋ฐฐ์ธ +๋‹์šฐ +์• ํƒ€ +์—…๋กœ๋“œ +##์กฐ๋ถ€ +Bu +Up +##71 +ํ”ผ๋ถ€์—ผ +์–ด๋ฒค์ ธ +๋ถ์•„ํ”„๋ฆฌ์นด +์„ธํƒœ +์ฒ˜๋งˆ +์›ํฌ +ํˆฌ๊ฒŒ๋” +Black +์•”์ดˆ +๋„๋ผ์ง€ +์œ ๋‹ˆ์˜จ +์—์–ด๋ฐฑ +๊ธฐ์–ด์ด +ํ„ฐ๋œจ๋ฆฐ +๋นˆํ˜ˆ +์•„๋ณด์นด๋„ +์š”๊ตฌ๋ฅดํŠธ +NBC +์บ์ŠคํŠธ +์•„๋‹ˆํ•œ๋‹ค +##์ˆ˜๋ฒ” +์ž„์ฐฝ์šฉ +๋ณธ๋”” +##ํƒ€๋กœ +์˜๊ฒฌ์„œ +์˜ˆ์„ ์ „ +##๋‹ˆ์•ˆ +์Šค๋‹ˆ +์ •๋žต +๊ถŒ์œ„์ž +๊ฐ์ถ• +๋‚œ์ƒ‰ +Service +๋‚š์‹ฏ +##๋ฐ”์˜ค +๋ฌธ์ข… +์•ž๊ธธ +##์—ํ”„ +3200 +๊ฒŒ์ž„๊ธฐ +์ž ์ž์ฝ” +##๊ฐœ๋ฏธ +๋ ˆ์˜จ +๊ต๋ถ€์„ธ +๋ฒŒ๋ ธ +##79 +๊ตฐ๊ธฐ +์†Œ์ถ” +์ฒญ์†Œ๋…„๊ธฐ +์—ดํ˜ˆ +##ffect +๋“ค๋ฝ +์ฟ ๋ฅด๋“œ +์œ ๋ชฉ๋ฏผ +##ridge +๋Œ€์—ฐ +์งค๋ง‰ +##ities +๋žด์˜ค +๋ผ์ž +์‘์ˆ˜ +์šฐ๋ฆฌ๋ง +์…ฐ์ผ +ํ•˜์ž„ +##amil +๋ผ๊ณ ๋„ +ํ˜ธ์‚ฌ +๋ฐ˜ํ’ˆ +์ง€์น  +##์น˜์ฆˆ +๋ชจ๊ตญ์–ด +์†Œ๋“œ +์ธ๋“ค +์ฐฝ์„ธ๊ธฐ +##gan +๋‘๋ ˆ +๋ผํŒŒ์—˜ +๋Œ€์  +๋“ค๋ฆฐ +๋ฆฌ์นด +์ถœ๋  +sel +์„œ๋ณ‘์ˆ˜ +Mag +์„ธ์šฉ +์ƒํ•œ๊ฐ€ +์„๊ณ  +๋„์šด +##31 +##๋งฅ์ฃผ +๋Œ€๊ฒฝ +๋ฏธ๊ฐœ +์ค‘์„ฑ์ž +๋ฉ”์†Œ +๋ชฉํ™” +๋„๋ฏธ๋…ธ +๊ท€๊ฑธ์ด +๋น™ํŒ +##ํผ๋ ˆ์ด์…˜ +ํ•ญ๊ท  +์‹ ๋ž„ +ํ‹ฐ๋ธŒ์ด +์„ ์‹ฌ +์นœํ˜• +ํ•œ์ƒ˜ +๋ชจ๋ผ +๋ณด์‚ดํ”ผ +๋ผ์™” +##๋…ธ๋ฒ„ +์กฐ๋‹ˆ +##ํŒŒ๋žŒ +๋งค์„œ์šด +๋งคํ‘œ +๋˜‘๋˜‘ํžˆ +๋‹ค๋‚˜์นด +์ง„๊ฒฝ +##๋ถ„์ž +์กฐ์ง๋ฒ• +๋น„ํ‹€๊ฑฐ๋ฆฌ +ํ•™๊ด€ +##๋‘๊ณ  +๊ทธ๋Ÿด๋“ฏ +๋งŒ์ง€์ž‘ +๊ฐ„๊ทน +๋ฐฑ์„ +ํ•™๋‹น +##๋งฅ์ƒ +๊ทธ๋Ÿฌ๊ณ  +##์ œ๋น„ +##ํ•ฉ์„ฑ +์ˆ™์ทจ +์˜ํฌ +์งํ‰ +ํ”ผ๋ณดํ—˜์ž +๋งŒํ™”์ฑ… +๋ถ€๋ฆ… +ํ•จํฅ +ํฌ์žฅ๋งˆ์ฐจ +Id +์˜์ˆ  +์œก์•ˆ +##ํ‹ฐ๋‚˜๋ฌด +##์œ ์ง„ +ํ™ฉ์ง„ +์ง๊ฐ„์ ‘ +##EX +์–ด๋“œ๋ฒค์ฒ˜ +์‹œ๋„๋Ÿฌ์šด +์ •ํ‰ +๊ฒฐํƒ +๋ธ”๋ž‘ +์นœ์ˆ˜ +์ˆ˜์—…๋ฃŒ +์—ฐ์Šต์ƒ +ํœฉ์‹ธ์ธ +##ํด๋กœ +์„ฑ์•…๊ฐ€ +##49 +์•ผ์ž +์ฐป์ž” +AFP +๋…ธ๋ชจ +ํŽ˜๋ผ +##์–ด๋จธ๋‹ˆ +##36 +##์„ ์ง€ +๋Œ€ํ˜๋ช… +์ฝ”์Šค๋ชจ์Šค +์ด์ฃผ๋…ธ +##์…ˆ๋ถ€๋ฅดํฌ +์™€์ด๋ฒˆ์Šค +๊ตญํŒ +์–ด์˜ +์ค‘์˜์› +๋ณธ๋‹ค๊ณ  +์„ ์„  +ํ™ฉํฌ +์žฅํŒ +๋‹ค์ณ +๋‹นํ—Œ +ํ—ค๊ฒŒ +๋…ธ์—ญ +์ˆ˜์ž” +ํŒŒ๋…ธ +๋ฐœ๋ˆ +์†Œ๋ฆด +ํ„ฐํŠธ๋ ธ +IV +๋ฝ‘ํžˆ +์œ ํ˜ˆ +##orge +Str +##๋ƒ‰๋ฉด +Air +์„œํ•‘ +ํ•ด๋™ +๋ฐ”๋ฅผ +์œค๋™์ฃผ +##์†Œ์‹œํšจ +Thomas +๋ฌธ์ง€ +##์Šคํ† ์ด +Sim +์‹์ „ +๊น”๊น” +์ตœ์ˆ˜ +๊ณก์ธ +๋‚œ์‚ฌ +๋ณด๊ตญ +์›”ํ„ฐ +๋นจ์น˜์‚ฐ +๊ฐ์‹ 
+##๋ณด์ฆ๊ธฐ๊ธˆ +๊ต๊ถŒ +๋“œ๋ฆฝ +์„๊ตด +์นดํ†จ๋ฆญ +๋ฉ์ด +์–ต์ƒˆ +์„œ์–‘์ธ +๊ณต์†Œ์‹œํšจ +๋งจ๋ฐœ +##anian +##ological +ํ—Œ๋ฐ +์šฉ๋ฌธ +์ฐจ๋ถ„ํžˆ +์‹œ๊ทธ๋„ +๊ฐœํฌ +์œ„์ฆ +##๋‘๊ทผ +๋‹จ์‹œ๊ฐ„ +์ ˆ์ ˆ +์ข…์†Œ๋ฆฌ +##์ŠคํŠธ๋กœ +์ธํ’ˆ +๋ฒ„๋Ÿญ +ํšŒ๊ณ„์‚ฌ +##ucation +๋™์žฅ +๋‹ค์ด๋…ธ์Šค +์„ญ์ • +๋‚จํ•˜ +์›์ œ +์‹ ์ถ˜๋ฌธ์˜ˆ +ํํ +๊ดด๋‹ด +๊ตฌ๋ณด +##์ƒ์˜ +๊น€๊ตฌ๋ผ +ref +๋งˆ์˜ +ํŠ€๊ธฐ +ํ›„์›ํšŒ +ํ”„๋ผ์ด๋น— +์ปจํ…Œ์ด๋„ˆ์„  +๋ฃฉ์…ˆ๋ถ€๋ฅดํฌ +have +์ „๊ณก +๋Šํ‹ฐ๋‚˜๋ฌด +๋‘๊ณ  +๋Œ€์šด +๋ฌด๋ฆ‡ +WWE +๋ˆˆ๊ฐ +์ฃผ๊ฒฝ +Colle +์กฐ์„ +๋ฌดํ„ฑ๋Œ€ +๋งฅ์•„๋” +๋ณดํ˜ธ๊ด€ +ํ˜„์ƒํ•™ +๋ฌด์„œ์›Œ์„œ +๋ธ”๋Ÿญ +##way +##๋ž‘์Šค +222 +ํ˜ธ์‹ค +##37 +๋ฐ•์—ฐ์ฐจ +๊ทธ๋žŒ +๋‚˜๋กœํ˜ธ +์ธ๋„์ฃผ์˜ +์‚ผ์ผฐ +ํ•˜์—ญ +ro +๊ฐ€๊ตฐ +๋ด‰์‚ฐ +์ž ์ž… +this +ํ•˜์—ผ์—†์ด +๋‚™์–‘ +์˜ค๋” +์˜ท๊ฐ +๋Œ€์„œ์–‘ +๋ฌด์  +๋ฆฌ๋””์•„ +๋ฒ ๋ฅธ +ํŠน๊ณต๋Œ€ +Ibid +๋‹จ์ž +์‹ ์Šน +##ํŽœํ•˜๊ฒ +๋ฆฌ์šฐ์Šค +์ตœ๋ช… +##์ž‘์ „ +##๋„Œ์Šค +์†Œ๋Œ€ +๋กœ์ €์Šค +ํ˜ธ์ฐŒ +์šฉํ•ด +์ง„ํ”ผ +##์—ฐ์„ค +##๊ณ ์ƒ +##ux +##PR +๋‹ค๋ฆผ +##๋ถ€๋žด +##๋‹ค๋ž— +๊ณ„๋žต +์ฝ”ํŽœํ•˜๊ฒ +๋ผ์ณ +##๋ฐ”๋ผ์ง€ +๋„ค์ž„ +๋ ˆ๋ฒ„๋ฆฌ์ง€ +์›๋ฐ˜ +ํƒœ๋ฐ˜ +ํŠ€๋‹ˆ์ง€ +##์šฉ์ง€ +Park +์ฐจ์ฃผ +์ค‘ํ‡ด +๋„๋‚˜ +๋ฐ€๋ ค์˜ค +๋’ท๋ฌธ +์†Œ๋‹ค +ํŒจ๊ฑฐ๋ฆฌ +At +์Ÿํƒˆ +์˜ˆ๋น„์—ญ +๋ณด์‚ดํŽด +##eek +๋‘๊ณ ๋‘๊ณ  +๋งŒ์œ  +์‚ฌ๊ฒฉ์žฅ +๊ณต์˜ˆํ’ˆ +๋ถ์„œ๋ถ€ +์ž์˜์‹ +ํƒ€๋ฆฌ +๋ฒŒ์–ด๋“ค์ธ +๊ดœํ•œ +๋„์‚ฌ๋ฆฌ +Mo +๋Œ๋‹ด +##๊ตฌ์กฐ +๊ณ ํ’ˆ๊ฒฉ +๋””์ œ์ด +์„ค๊ณ„๋„ +๋…ธ๋Ÿ‰์ง„ +##87 +3600 +์•”๊ฐํ™” +๋„๊ธˆ +์ž…ํ•ญ +์†”์„ ์ˆ˜๋ฒ” +๋ชฉ๋œ๋ฏธ +๊ณ ์šฉ์ฃผ +๊ธฐ๋„ค์Šค +์ฐจ๊ฐ +ํ—ค๊ฒŒ๋ชจ๋‹ˆ +๋ณ„๋น› +์—ฐ๊ฐœ +236 +๋ฌดํ˜‘ +์ ๋ฆฐ๋‹ค +๋‚˜์˜ฌ์ง€ +์˜ํ™”์ธ +##iter +3300 +์ €์ž‘๊ถŒ +ํ’์กฑ +ํž์Šค +##์žฅ๋‚œ +ํƒ€์Šค +##IL +๋‹ค๋ผ +๋ฑƒ์‚ด +๋ฆฌ์‚ฌ +##bps +๋ฏธ์ˆ ๊ฐ€ +์žฅ๋ฐ‹๋น› +580 +ํŒ๋ณธ +์ƒดํŽ˜์ธ +##๋ž‘๋“œ +๋ฒ ์ด์ง +Ap +์„ฑ์ฐฌ +์ž ๊ธˆ +๊ทผ๊ฑฐ๋ฆฌ +๋ฉ˜ํƒˆ +์„ฑ๊ด€ +์˜ค๋šœ๊ธฐ +์–‘์‹์žฅ +๊ฐ•๋ฆผ +๊ธˆ์น˜ +์ด๋ถ€ +##์‚ฌ๊ด€ํ•™๊ต +ํ™ฉ์˜ +VOD +์ˆ˜ํ–‰์ž +๋ถ€๋žด๋ถ€๋žด +์ฑ„๊ด‘ +Cons +##๋ฉ”์ด์นด +์žํŒ๊ธฐ +๊ณผ์ˆ˜์› +๊ตญ์ธ +๊ดด๋กญํ˜” +ootd +์ƒ๋Œ€ํŽธ +์œผ๋ฃจ +๊ฑธ์Œ๊ฑธ์ด +์ƒˆ๋Œ +์‹ ๊ฒฝ์ฆ +์œ ๋ฏธ +๊ฐ€์†๋„ +์‹์ž์žฌ +์‚ฌ๋‹จ์žฅ +์‚ฐ์‚ผ +##ki +๊ฐ€์ฐจ +๊ฐ์–‘ +์–ด์„œ์š” +์ฒœ๋ฐ• +##์ €์ถ• +Bas +๋‹ค๋˜ +์•„๋ธŒ +์„ ๊ฑฐ์ผ +##์‹ ๊ธฐ +๊น€๊ด€์šฉ +๋งค๋„๋Ÿฌ์šด +ํ˜ธ๋Ÿฌ +์–ด์„คํ”ˆ +์žฅ์ƒ +์‚ดํŽด์•ผ +##iki +๊ทธ์œฝ +์ฃผ๋ง‰ +๊ณ ์‚ฌ๋ฆฌ +๊ทธ๋ฆฌ๊ตฌ +๋‚ด๊ฒ +๋จธ์Šด +car +ํ•™์˜ˆ +ํ—Œํ„ฐ +์šฐ์„ ์ฃผ +๋™๋Œ€๋ฌธ๊ตฌ +๊ฝƒ๊ฒŒ +์•ž๋จธ๋ฆฌ +##wo +##๋ฏน์Šค +๋„˜์–ด์˜ค +๊ผญ๊ผญ +๋ฒผ๋ฃฉ +๋น„์ข +๊ฑธ๋ ค์„œ +๋™๊ฐ‘๋‚ด๊ธฐ +์ตœ์ „๋ฐฉ +ํ…Œ๋„ˆ +โ”€โ”€ +๋–จ์–ด๋œจ๋ ธ +๊ด‘๊ตฌ +๋ถ€๊ฒฝ +์„œํ‰ +์— ๋งˆ +##91 +##๊ฒจ์›Œ +๋ถ€ํ’€๋ฆฌ +๋Š๊ปด์ ธ +##๋ณต์ง€ +์ฃผ์ฒด์‚ฌ์ƒ +๋†์ˆ˜์‚ฐ +ใ†์™ธ +๋‚˜์„ฑ +์˜€์—ˆ +##HA +์—ฐ๋ฐœ +##์–‘์ด +##์นด์ฟ  +๊ณ ๋”• +์ˆœ๊ต์ž +๋…ธ์ƒ +์กฐ๋ฐ˜ +์ง€ํ˜„ +์Œ๋ž€๋ฌผ +##76 +๊ณ ๋† +๊ฑฐ์Šค๋ฅด +์—ญ๋ฅ˜ +ํ˜„์•„ +Mic +๋ฒก์Šค์ฝ” +๋ถ€๋Ÿฌ์ง„ +์‹œ์›” +์—„์—ฐ +ํฌ์—ด +์ž„ํ•ด +ํ—›๋œ +์ค€๊ฑฐ +##์„ ๋ฏธ +๋งˆ๋ฅด์…€ +๋ฌดํ„ฑ๋Œ€๊ณ  +ใ†์ง€ +๋ชจ์ž์ดํฌ +์ด๋ฆ„๋‚œ +์„ฑ์šด +๋“ค์ด๋ฐ› +๋ฉด์ฃ„๋ถ€ +๋ฏผํ™” +๋ฐฑ๋ฐœ +๊ฐ‘์ž‘์Šค๋ ˆ +์ •๊ฐˆ +๋”ฐ๋Œ๋ฆผ +์ฃผ๋จน๋ฐฅ +๋ณด์ฒญ +๋น„๋…ธ +ํ•ด์•ˆ๊ฐ€ +๊ฐ„์‚ฌ์ธ +##hys +ํŠน์ˆ˜๊ฐ• +๋„๋Ÿ‰ +๋ฏธ๋…ธ +Society +๊ฐ‡ํžˆ +๊ณจ๋Œ€ +์กฐ๊ฐ• +๋ถ„์ˆ˜๋ น +์œ ๊ฒฝ +๊ฑฐ๋ฒ„๋„Œ์Šค +๋ ˆ์•„ +์š”์  +gre +๊ฐ“๋‚œ +๋นš์–ด์ง„ +์žฌ์˜ +๋ธŒ๋คผ์…€ +์‚ฐ๋‹ค๊ณ  +ํ™ฉ๋ณด +์ œ์„ค +ํ• ๊ฒŒ +##๋ถ€์ง€ +์šด๋™ํ™” +๋ฒ…์ฐฌ +์ด๋ฐ” +์žฅ์‹ค +๊ฐœํšŒ์‹ +์›Œ๋Ÿฐ +##์‹œ๊ณ  +๋ถ€๋”ช์ณ +์‹œ์‚ฌ์  +๋ฌด์ธ๊ธฐ +๋’ค์„ž์—ฌ +##84 +์ƒˆ๋กœ์›€ +์ƒํŠธ +์ฝ˜๋„ +์•„์†Œ +๋“ค์–ด์˜ฌ๋ฆฌ +๊ตญ๋นˆ +##๋ฐœ๋ ฅ +๋Œ€์ž +๋ณธ์ „ +ํžํŠผ +๋ฉธ์‹œ +์ด์นด +๊น€์ƒํ˜„ +๊ณต์‹ค +๋‹ด์ž +##eting +๋ณตํ•ฉ์ฒด +๊ตถ์ฃผ๋ฆผ +##etter +๋นˆ์† +์ดˆ๊ณ ์† +ํ’€๋ฐญ +##ham +์œผ๋ฅด +ํŠœ๋‹ +##32 +๊ธˆํ™” +์†Œํฌ +๊น€์ฃผ์„ฑ +๊ทธ๋žœํŠธ 
+๋ฐ”์ดํฌ +์›€์ฐ” +๊ฐ€๋Š๋‹ค๋ž€ +pre +๋ฃจํ‚ค +์–‘๊ฐ• +์ ๊ธฐ +์ถœ์—ฐ๋ฃŒ +##outh +๊ด‘๋…„ +์ž๋ฉ”์ด์นด +๊ดด๋กญํž˜ +๋ฐœ๋ช…ํ’ˆ +์‹ฌํžˆ +์‚ฌ๋ฌธ +์†ŒํšŒ +๋ถ„๊ฐ„ +๋‹ฌ๋ ค์™€ +์ˆœ๋ฐœ๋ ฅ +์ฃผ์ „์ž +ํœ˜๋‘ +์ƒˆํ•˜ +๋‚œ๋„ +๊ตํ†ต๋น„ +๋‚˜์ด์Šค +์ „์žฌ +๊ธˆํ•˜ +ํ˜‘๋ ฅ์ฒด +์ธ์Šค +์ผ๋ณ€ +๊ฐ•ํ™”๊ตฐ +๋…์„œ์‹ค +์ž‡๋”ฐ๋ž +๊ท€๊ธˆ์† +์‚ฌ๋‚ฉ +์‹ฑ๊ทธ +๊ธˆ๋ฉ”๋‹ฌ๋ฆฌ์ŠคํŠธ +๋ฆฌ๋ถ€ +##๋‹ค๋‹ฅ +##๊ฑธ์Šค +๋„˜์ณค +๋‹ค๋งˆ +์žฅํ™” +์™ธ๋”ด +์ดˆ์‹ +์น˜์„ธ +๋นˆ์„ผํŠธ +์•Œ๋ ค์•ผ +๋…น์ด +ํ†จ์Šคํ† ์ด +๋‹จ์‹  +๋Ÿฌ๋ธ”๋ฆฌ +์ผํšŒ์šฉ +##78 +##๋™์•„ +์ง‘๋… +ํ”ฝ์…˜ +##์ˆ˜์†Œ +๋ถ€๋“œ๋Ÿฌ์›€ +์•”์Šคํ…Œ๋ฅด๋‹ด +ํ•œ๋ฏผ +๋‘๋งŒ๊ฐ• +๋ฏธํ˜ผ๋ชจ +์›…ํฌ๋ฆฌ +๋ง๋š +์…€๋Ÿฌ +์‹ ์ฐฝ +์กฐ๋ฐ” +๊ฒฉ์ž +์ˆ˜๋  +๋ฐ€๋ ˆ๋‹ˆ์—„ +๋“ฑ๋ณธ +์†Œ์—ฐ +๊ต์œก๋ฒ• +๋“€์Šค +๋ฒˆ๋“ค +์ผœ์กŒ +์Šคํƒ€์ธ +์žฅ์ง„ +1882 +์นด์ด๋กœ +๋“ค๋–  +์›์™ธ +##์‹ฌํŒ +๊ณถ๊ฐ +์ง„ํฅํšŒ +์ •๋ณด๊ตญ +640 +๋ง›์—† +์ด๋ธ +์ง„์ž…๋กœ +ํ›„์ฒœ +์ƒ์†์ธ +์ „๋ฏธ +์ด๊ตญ +์ฐพ์•„๊ฐˆ +๋งŒ์ทจ +์ˆ˜๋ณต +ํ•˜์Šน +๋‹ค์ด๋‚ด +์ˆ˜์•ก +๋‘๋“œ๋Ÿฌ์ง„๋‹ค +##๋น„์•ˆ +##์นด๋ฏธ +๋”ฐ๋ˆ +๋น„๊ฑฐ๋ฆฌ +์•„์ „ +์‚ฌ๊ทธ๋ผ +๋„๋กœ๋ณ€ +๊น€๋‘ํ•œ +์‚ฌ๋‚˜์šด +์ธ๊ธฐ๋ชฐ์ด +์„์ง€๋กœ +๋“ฑ์‚ฐ๊ฐ +ํŒŒ์ธ์• ํ”Œ +๋””๋”” +so +๋ผ์ŠคํŠธ +๋ป”ํžˆ +๋ชจ๋ธ๋ง +๋›ฐ์–ด๊ฐ€ +๊ฐ„๋งŒ +์ž๋ถ„ +##๋Œ€์™• +010 +์พŒ์† +๊ตฐ๊ตญ +์œ„์ธต +๋“œ๋„“ +์ˆ˜๊ฒฝ +##ํŠน์œ„ +์ „์œ ๋ฌผ +ํŒํ™” +##๋ฐฑ์‚ฐ +์‹ ์‚ฌ๋™ +์„์–‘ +์ œ์› +์ฐฝ๋• +##์•ฝ์ฒญ +์›จ์ธ +๋ถ€๋”ชํ˜” +์†์žก๊ณ  +TOP +Jos +๊ฐ€์†๊ธฐ +325 +ํญ๋“ฑ +์‹œ๊ทธ๋งˆ +ํžˆ๊ฐ€์‹œ +ํ‘œ์ค€์–ด +##ํ”„๋ ˆ์†Œ +์ œํ”„๋ฆฌ +์ฐฌ๋ฏธ +ํ•ด๋กœ์šด +ํ†ต์ผ์„ฑ +์Šต๋‹ˆ๋‹น +ํ„ฐ๋นˆ +ํ”ผ์˜ค +์„œ์—ฐ +๋ฐ•ํ•˜ +##์ฃผ๋จธ๋‹ˆ +์ง€์ผœ๋ด +ํ˜„์ฃผ์†Œ +์‹œ์˜จ +์‘์šฐ์˜Œ +๊ด€์•…๊ตฌ +์Šˆ๊ฐ€ +์‹œ์น ๋ฆฌ์•„ +##์—๊ณ  +๋กœ์ œ +ํ‘ธ์ฝ” +##NT +##๋งŒ๋ณ„ +##omeranian +๋น„์ŠคํŠธ +ํ•˜์˜ค +์–ด๋ฒค์ ธ์Šค +์ฒœ์ฐจ๋งŒ๋ณ„ +๋“คํ‚ค +##94 +๋นผ๋Œ๋ฆฐ +์ž์กฐ +์ง‘์ง‘ +๋…ผ๊ฑฐ +๋™์ž‘๊ตฌ +codice +1883 +pomeranian +285 +ํ•˜์ดํ…Œํฌ +๊ฐ์› +๊ถŒํˆฌ +Top +์นดํƒ€ +ํ—˜์•… +์—ฐ์ฃผ๊ฐ€ +๋‹ค์ด๋‚˜ +์ˆ˜๋ผ +์–ด๊ธ‹๋‚œ +##์žฅ๊ฐ +๋ถ€์–ด +##ํŽ˜ํ…Œ๋ฅด๋ถ€๋ฅดํฌ +ํ˜•์ฃผ +##์‚ฌ์˜ +๊ณ ์—ด +ํ•˜ํƒœ +๋งˆ๋ฅดํ‹ด +์กฐ์ธ์„ฑ +์• ๊ฒฝ +์Ÿ๋ฐ˜ +ํ˜„์ • +์‹์•ฝ์ฒญ +์–ต๋ˆŒ +ํด๋กœ +๋ฆฌ์–ธ +์‚ฌ์นญ +ํ™์—ญ +์ถฉ๋ฌด๋กœ +์ƒํŠธํŽ˜ํ…Œ๋ฅด๋ถ€๋ฅดํฌ +์‹œํฐ +๋ฒ„์ “์ด +๊ต์‹  +ํ•ฉ๋‹ˆ๋‹ค๋งŒ +์žฌ์ •๋‚œ +์ตœ๋‚จ +##ํŠน๊ตฌ +NFC +์ง€๋ฐฐํ–ˆ +ํ˜ธ์ฃผ๋จธ๋‹ˆ +##๊ธˆ๊ณ  +##์•„ํ”„๋ฆฌ์นด +์š”๊ตฌ์‚ฌํ•ญ +๋™์  +๋ฃจ๋‹ˆ +##์นด๋ฉ”๋ผ +๋”ฐ๋ผ๊ฐ” +์ฃผ์†Œ์ง€ +์œ„์•ฝ๊ธˆ +ํ—ˆ๊ท  +ํ›„ํŒ +##ode +์ƒ๋‹ด์› +##์„ฌ์œ  +๋‹จ๋… +##๋‹จ๋‹ค +๋ถ€์งˆ +๋ฐฉ๋ฐ”๋‹ฅ +์Œ๋Œ€ +์ฐฝ์•ˆ +ํ† ๋ฐ•์ด +๊ฑด๋ฐฉ +์•…๋ฒ• +##์›ํšŒ +์ธํ”ผ๋‹ˆํ‹ฐ +๋”ฐ์กŒ +๋ฐ•์‚ด +305 +mon +์˜ค์‚ฐ์‹œ +๊พธ๋Ÿฌ๋ฏธ +๋ฌด์ฐธ +๋ฐฉํ˜ธ +์‚ด๋ฒŒ +์ˆ˜๋‹ˆ +์‹คํ•™ +##์บ๋กค +ํ”ผ๋‹‰์Šค +์ƒ๋ฆฌํ•™ +##๋ถ€์•„๋ฅด +๋ถˆ๋Ÿฌ์ผ์œผํ‚ฌ +๊ฐ€์ฐฝ๋ ฅ +๊นจ์กŒ +์žฌ์ถ” +๊ธฐ์กฐ์—ฐ์„ค +๋ฃจํŠธ๋น„ํžˆ +๋‚˜๋ฝ +์ค‘์„œ +ํ† ํ–ˆ +LTV +์ž„์ง€ +ํ•™์ œ +๋ถ€๋Œ€์‹œ์„ค +๊ทธ๋ ˆ์ดํŠธ +##ipp +๊ด‘์•ˆ๋ฆฌ +์ „๋ฉด์ „ +๊ณ ๋งˆ์›  +Sam +DI +์ฐ๋  +Richard +๋งž์ถ˜๋‹ค +์žฅํ•˜ +๋ฆฌ๋‹ˆ์ง€ +์ž„ํ–ˆ +##73 +๋‹ค์‹  +๋””๋น„ +๋ถ„๋ฆฝ +์Œ๋ฐฉํ–ฅ +๊น€์˜ฅ๊ท  +cit +##์…ฐ๋น„ํ‚ค +๊ฐ„์ˆ˜ +ํ”Œ๋ ‰์Šค +๋Ÿฌ์‰ฌ +ํŠ€๊ฒจ +Development +๋ฒˆํ˜ธํŒ +์Šฌ๋Ÿฌ +์šฐ๋ฏธ +๊ธฐ๊ดด +์ฒญ์ฒœ +๋งˆ๋‹˜ +์‹ ํŒŒ +์šฐ๋ฅด๋ฅด +ํ•„๋ผํ…Œ์Šค +##์บ๋กค๋ผ์ด๋‚˜ +์ •์ฒ  +๋ง๊ฐ€์ง€ +๊ณ„์…” +๊ณ ๋ถ„๊ตฐ +ํ™ฉ์ธ +UF +์กฐ์ฐฌ +ํญ๋ฐœ๋ฌผ +๋ผ๋ฒค +์„ฑ์กฐ +ํ•˜๋ฅด +##min +์‚ฌ๊ณผ๋ฌธ +##ํžˆ์ฝ” +ํƒ์‚ฌ์„  +ํ”„๋ ˆ์Šค์„ผํ„ฐ +์ž‘์—…์ž +๊ธฐ์ง€๊ตญ +๋ฐ”๋ฐ” +์ฒœ๊ฑฐ +ํ•ด๋ถ€ํ•™ +๋ฒŒ๊ฑฐ๋ฒ— +ํฌ๋งํ‡ด์ง +์ ๋ฃŒ +์†Œ์œ„์›ํšŒ +์ฃผ๋ฌด๊ด€ +ํ•จ์žฅ +๊ณ ๋ ค์‚ฌ +์•ˆ์ด +์šด๋™๋Ÿ‰ +##์ œํ•œ๊ตฌ์—ญ +์ˆ˜์กฑ๊ด€ +##vern +์ „์ง€ํ˜„ +๋ฌด๋ฐฉ๋น„ +๊ฑด๋‹ฌ +ํˆฌ์ž์•ก +ev +๊ธฐ๋ฏผ +##์—ฌ๊ฐ +##๋ง์šธ +##ffee +ํ•ด๋ฐ€ํ„ด +##ero +๊ณต๋งค +์‚ฌํ—Œ +##ํ˜ธ์šฐ +์ด์Šน๊ธฐ +ํ”ผ๋…ธ +๊ธฐ๋‚˜ +์ฐฝ์กฐ์„ฑ +๋ชฉ๊ณต +ํ—ค์–ด์ ธ 
+๋ฌผ๋“  +์‹œ๋Œ +๋ชธ๋šฑ์ด +So +์†ก๊ณณ +USA +์ƒ์กฐ +WM +2050 +๊ณจ๋จธ๋ฆฌ +##each +##ํ‰๋™ +์–ด๋ฒ„์ด๋‚  +ํœ˜ํŒŒ๋žŒ +์ „์ž๋ ˆ์ธ์ง€ +ON +์šฐํ›„ +์†Œ๋ฐฉ๋Œ€ +์œ ๋ถ„ +๊ฐœ๋ฐœ์ œํ•œ๊ตฌ์—ญ +1871 +๋“œ๋ผ์ด๋น™ +์‚ฌ์„  +์ด๊ฒจ์•ผ +์•ˆ๋‹ค๋Š” +Kar +์‚ฐํ‹ฐ์•„ +์ฆ๊ธด +๋ถ€๋ฆด +์•ผ์Šค์ฟ ๋‹ˆ +๊ฑฐ๋ฆผ +์‹œ์•„๋ฒ„์ง€ +##๋‚˜๊ฐˆ +ํŒŒ์ˆ˜ +๋‚˜์•„์งˆ +AA +##๋ฅธ์ž +๋ถˆ๋ถ™ +ํŽ˜๋กœ +ํญ์†Œ +์•„๋…€ +๋ฐ๋ฏธ +##ible +์ค‘์‹ฌ์ถ• +๊ณ ์•„์› +##๊ต์ฐจ +์น ํŒ +์‹ ๋น„์ฃผ์˜ +๋‚ด๋ฌด๋ถ€ +๋งž์„ค +์ˆ™๊ณ  +์‹ ๋‹ค๋Š” +์•ˆ์œ„ +์•™๋“œ๋ ˆ +๋งˆ๋กฑ +๋ฐ”๋ฒจ +์ดํ†ต +ํ•™๋ช… +##๋ฏธ์ˆ  +##๊ณต์‚ฐ๋‹น +์˜ํ•ด +##์ง„๋‹ค๋ฉด +์˜ˆ๋‹จ +๊ฐ€์ค‘์น˜ +์ฒญ๋…„ํšŒ +์™„๋น„ +ํฌ๋ฉ”๋ผ +์žํ‰ +์ง“๋ˆŒ +ํผ์ง„ +์‚ด์•„๋‚  +์˜์ž +๊ณ ์˜๋กœ +์—์›Œ์‹ธ +๋‚˜์นจ +๋œ์ปฅ +์„ ๋ฐ์ด +ํ—ฌ๋ ˆ +##erc +์ฒ™์ฒ™ +์ฟ ํผ +ํ˜„๊ฐ +ํ˜„์ถฉ์› +์ œ๋…ธ +์ถ”์‚ฌ +์น˜๋‹ฌ +ํ›„๊ฐ +๋ฐ€์‹ค +์ œํผ์Šจ +์นด์„ธ +ํ•ด์žฅ๊ตญ +๊ทผ์ €๋‹น +๋จนํžˆ +์—์šฐ +์ฒ ๊ฑฐ๋ฏผ +##AAD +๋…น์ƒ‰๋‹น +์ฒœ๋ณ€ +##day +์ด์ข…๊ฑธ +๊ตฌ๋Œ€ +์ฒด์ธ์  +ํ”Œ๋กฏ +์ž”๋‹ค +๋‹ต์Šต +๋ฒ„์Šคํ„ฐ +์†Œํƒ• +์œ ์ •๋ณต +ํŒ๊ฐ€๋ฆ„ +์˜ค์†” +๋ณด์ฒญ๊ธฐ +๊ธฐํ˜ผ +##ํ†ต์‹  +๋‹ค์ธ +์ •์ง„์„ +์ƒ์ง•๋ฌผ +ํ–ฅ๊ธ‹ +์ง‘์ค‘ํ˜ธ์šฐ +##๊ฐ๊ฐ +ํƒ€์ดํƒ„ +ํ•๋ฐ• +ํ˜„์ง€์ธ +๋ฐ์ด๋น— +๊ถค์  +์ด์นญ +##์ด์กฐ +๋ง‰ํž +๊ตํ™œ +๋ƒ‰๋‹ด +๋น ๋“ฏ +์–ฝํžˆ +imp +์‚ดํŽด๋ด์•ผ +์„œ์•ˆ +์„ธ๋„ +๋น„์ž”ํ‹ฐ์›€ +ํƒ„๋‹ค +ํ–ฅ์‹ ๋ฃŒ +๊น€์‘ +์–ด์žฅ +##ใ…Œใ…Œ +๋ˆˆ๋ฉ์ด +๋ง๋‹คํˆผ +๋ฆฌ๋ชจ์ปจ +YS +๋ฐค์ƒ˜ +##instagram +๋„๋† +์ทจ์‚ฌ +๋‹ด์•„๋‚ธ +ํ† ํ•‘ +##74 +##ํ† ๋ชจ +์‚ฌํ—Œ๋ถ€ +๊นœ์ฐ +ํ‰์‹  +##์กฐ๋ชฉ +##65 +ํ•œ๊ตญ์‹ +๋งˆ์„๊ธˆ๊ณ  +๋ฐ˜๋ชฉ +##์ž๋‹ˆ์•„ +ํก์—ฐ์ž +์ˆœ์ž +๋ฌผ๋Ÿฌ์„ฐ +์ง‘๊ถŒ๋‹น +๊ด€๋Šฅ +์ƒ๋„ +์ง„๋‘์ง€ํœ˜ +๊ฐ•์žฌ +ํ™˜๋Œ€ +mar +๊ฐ€๊ต +๋ฐ˜๋ง +์ถœํŒ๋ฌผ +MT +๋ณต๋ณ‘ +์•Œํ”„๋ ˆ +์„์‚ฌ +๊ฐ์–‘๊ฐ์ƒ‰ +์กฐ๋‚˜๋ผ +๋ธŒ๋กœ๋“œ์›จ์ด +์ปค๋„ฅ +ํ˜ธ์น˜ +๋‚˜๋ถ€ +์ƒ์•„ +์ฒœ์ผ์—ผ +์Ÿ์•„๋ถ€ +์ˆœ๊ตญ +##๋“ฑ๊ธฐ +ํ”Œ๋ž˜๋„ˆ +์นœ๋ฐ€๊ฐ +์ž์žฅ๋ฉด +์ถ•๊ตฌํŒ€ +์˜๊ฒฐ์‹ +๊ธธ์žก์ด +์ฆ๋ฅ˜ +ํ™์–ด +์•Œ์•„์ฐจ๋ ธ +๋ฏธํ–‰ +๋ฒˆ์กŒ +๋ฐ”์ ค +ํŒ”๋‹ฌ +3800 +๊ฑด๋„ˆ๊ฐ” +๊ฑฐ์—ญ +์ด์ฐฌ +ํƒˆ๋ฃจ +์˜คํด๋žœ๋“œ +๊ฒฐํˆฌ +##์™€๋ผ +์˜ค๋ฅด๋ง‰ +์•ž์„ ๋‹ค +ํ•œ๊ตญ๊ฐœ๋ฐœ +์‚ฐํ‹ฐ์•„๊ณ  +๋‹ซํžˆ +์ˆจ๊ธธ +๊ฐ•ํƒˆ +์ „ํ•ด์ง„ +##34 +##๊ฐ„๋‹ค๋Š” +What +์ธต๊ฐ„ +๊ณต์„ฑ +์š”๊ฐ• +ํ˜ผ์ „ +์ฝ”๋ฏน์Šค +์—ญ์‚ผ๋™ +##line +##๋ฃฝ์ง€ +๋น„์ƒ์‚ฌํƒœ +์˜ค์žฌ +์šฐ์„ฑ +์ธ๋”์Šค +##ํฌ๋„ +๋ถ์กฐ์„  +ํ•œ๋‚ฑ +๊ฐ€๋„ค +์„ธ๋ฐ” +๋ถˆ๊ฐ€๊ฒฐ +ํ•œ๋‚ฎ +์ง€๋ฆฌํ•™ +์ด๋ง˜๋•Œ +๋…์‚ด +##๋งˆ์‚ฌ +๋„์ „์ž +์กฐ์šฉํ•„ +์˜ˆ์ข… +๋™๋ถ์•„์‹œ์•„ +Mor +ํ•ฉํ—Œ +์ „๋ชจ +๊ธฐ๋…๊ต์ธ +์‹ฌ์—ฐ +๋‚ ์•„๋“ค +dogstagram +๋„์ œ +๋นต๋นต +BG +๋ถˆํ˜„ +##์ œ๊ณผ +๊ณ ๋…ธ +์‹œ์‹œ๊ฐ๊ฐ +๋ฒˆํŠธ +์„œํ‚ท +Star +์š”๋žŒ +์ดํƒ„ +๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ +์„ค์ƒ๊ฐ€ +##๋ฉ”์ดํŠธ +๋™๊ทธ๋ž€ +๊ตญ๋ฐฉ์œ„์›ํšŒ +๊น€์ข…ํ•„ +์žฅ์ฃผ +์ข…๊ตฐ +##๊ธฐ์ˆ ์„ผํ„ฐ +hom +์„ค์ƒ๊ฐ€์ƒ +ํ—ท๊ฐˆ๋ฆฌ +ํƒ€์ž„์ฆˆ +์ง€ํ‰์„  +๊ธฐ๋ง‰ํžŒ +๊ณ ๋ช… +๋‹ฅ์น  +๊ฐ์‚ฐ +๋ถ€๊ตญ +๋ถ€์ด +์‹ ์นธ +์–ธ์•ฝ +##ko +์ƒ์œก +์ˆ˜๋ถ€ +๋™์„œ์–‘ +์—Š๊ทธ์ œ +์ตœ์ค€ +๋ฌด๋“ฑ์‚ฐ +RS +๋Œ€๊ฒฌ +๊ณต์—ฐํžˆ +๋ฏธ์•ผ์žํ‚ค +์•„๋‚˜ํ‚ค +์‹ค๋ ฅ์ž +๋ฏธ์ง€๊ธ‰ +์˜ค์†”๊ธธ +์จ๋จน +๋ฐ”๋ฅด์ƒค๋ฐ” +์‹๊ฒฌ +์ž”๊ณ  +United +ํ†ต์†” +์œค์ƒํ˜„ +๊ฟˆ๊พผ +์ต์‚ด +์ง์ง“ +๋ถ€๋งˆ +ํ˜ธ๋ น +์›”ํ™”๋“œ๋ผ๋งˆ +##ใ… ใ… ใ… ใ… ใ… ใ… ใ… ใ…  +๊ต์ฐฉ +์–ธ์ €๋ฆฌ +๋ˆ„๋ฆฌ์ง‘ +๋ฐฐ์‹ ๊ฐ +##์Šค์ฝ”ํŠธ +์„คํŒŒ +์€์˜ +์‚ฐ๊ธฐ +์‹ค์ถ” +์ง์ง„ +1889 +์—๋ณผ๋ผ +์•ˆ์˜ +ํ•œ๋‹ต๋‹ˆ๋‹ค +๊ฐ€๋ฅด์นœ๋‹ค +๊ป˜์„  +ํ˜•๋ถ€ +๋‹ค๊ฑฐ๋‚˜ +ํฉ์–ด์ง„ +##๋Ÿฌ์šธ +๋†์—…๊ธฐ์ˆ ์„ผํ„ฐ +์ˆ˜์ทจ +์ด์ผ€์•„ +์ง€ํ”ผ +๊ณผ๋ฏผ +๋‹ค๋นˆ +๋‚™์ฐฐ๊ฐ€ +๋™์˜์„œ +์ž‘์‹ฌ +##38 +๊ณผ์ˆ˜ +์Œ์•…์ธ +๊ณต์žฅ์žฅ +ํœฉ์‹ธ์—ฌ +์—ญ์ „ํŒจ +๊ด€์„œ +์ •๋ด‰ +๋Œ€๊ตฌ๋Œ€ +์‹ซ์ฆ +ํ˜ธํฌ +์กฐ๊ด‘์กฐ +๊ฒ€์†Œ +๋Œ„์„œ +ํ”ฝ์—… +##๋ฌด๋„ค +๊ฑธํ„ฐ +##๋ถ€๋ฅด +๋‹ฌ์•„๋‚œ +๋ถ์•„์ผ๋žœ๋“œ +์—ฌ์†Œ +์ตœ๊ณ ๋ น +##๋‚ดํ‹ฐ +ํœฉ์“ธ๋ ค 
+๋ฅ˜์Šน +##orts +์†ก๋‘๋ฆฌ +THAAD +๋ง‰์‚ฌ +๋ณ‘๋ฌด +์นœํžˆ +๋”ฐ๋ผ๊ฐˆ +์–ธ๋ก ๊ณ„ +Power +์กฑ์žฅ +ํฌ๋ฆฌ์Šค์ฒœ +๋“ฑ๊ธฐ๋ถ€ +์••๋ฐ•๊ฐ +์• ๊ตญ๊ฐ€ +Europe +์žฌ๋น ๋ฅด +htm +๋„ˆ์ง€ +์„ฑํ–‰์œ„ +๊น€์œ ์‹  +๋ฐœ์นธ +์นญ๋‹ค์˜ค +๊ฐ€์šฐ +Review +์„œ์ŠคํŽœ +๋งค๋‹ˆ์•„ +์ฐจ์ฐฝ +์•Œ์ฝœ +์„œ์ฒญ์› +ํŒ”๋ผ +๊ด‘์ฃผ์€ํ–‰ +์ƒ๊ถ +์žฅํ•ญ +##๋ฒ„๋‘ฅ +ํ—ค์•„๋ ค +๋ถ€์ข… +ํ•ญ๊ณ  +ํ›ˆ๊ณ„ +##์ƒ์กฐ +ํ•œ๊ตญ๊ฐœ๋ฐœ์—ฐ๊ตฌ์› +Camb +์žฟ๋น› +์ €์ˆ˜ +์šฐ์ต์ˆ˜ +๋ผํŠธ๋น„์•„ +์‹ ๊ตฌ +์ด๋ค„๋ƒˆ +์˜์žฌ๊ต์œก +๋ธ๋ฆฌ +๋ฒ”ํ•œ +์ž์ผ“ +์ •๊ณต +Human +๊ณจ์นซ +##์†Œ์„ค +์ข…๋ถ€ +์ฐธ์ž‘ +##์„ธ๊ณ„ +๊ณ ์šฉ๋ถ€ +์‚ฐ์ค‘ +ํ”ผํ +๊ทธ๋ฆฌ๊ณค +Cre +APEC +์Šฌ๋ฆฌํผ +##ration +๋ฌผ์‚ฐ +๋ฒ„ํ…ผ +์ดˆ์ „ +์ด๋ณ‘์ฒ  +๋ช…์•” +์–‘ํฌ +์˜จ์ • +์žฅ์ค€ +์•„๋‹์ง€ +์šธ์ƒ +์—ฐ์ถœ์ž +๋ถ€๋„๋• +๋กœ๊ตฌ๋‚˜ +๋Œ€์ƒ์ง€ +ํŒŒ๋ผ์˜ค +travel +ํ™”ํ•™์ œํ’ˆ +์‹ ์‹œ๋‚ดํ‹ฐ +์ฐธ๋‚˜๋ฌด +์‹œ์šฐ +์˜†๋ฉด +๋„ค์˜จ +๋‹ญ๋ฐœ +218 +ํŠนํ—ˆ์ฒญ +ํœ˜๋ง๋ฆฌ +๋„๊ฐ€๋‹ˆ +๋ถ€๊ท€ +๋กœ๊ทธ์ธ +์ด์ฃผ์˜ +์ปท์˜คํ”„ +๊ผฌ๋ฆฌํ‘œ +์˜คํ–‰ +ํฌ์ฒœ์‹œ +238 +์œ„์šฉ +์ธ๋””์• ๋‚˜ +##์‚ฌ์ดํด +๋Œ€ํ‘œ๋ถ€ +์„์—ฐ +##olf +๋ฌปํ˜” +์ดˆ์ž… +์ง•๊ฒ€ +๋ฐ•์„ฑํ˜„ +Mnet +ํ–‰์ฃผ +Best +๋‹ค๊ฐ€์„ฐ +์—ฐํ•ฉ๋‰ด์Šค +๋ฒ ๋“œ๋กœ +์ค„๊ธฐ์ฐจ +์กฐ์—ฌ +๊ด‘์–‘์ œ์ฒ  +์ฒœ์‹œ +๋กœํ…Œ์ด์…˜ +์‚ผ์ค‘ +์ˆœ๋ฆฌ +์œ—์‚ฌ๋žŒ +๊ต๋ณด์ƒ๋ช… +๊ทผ์†Œ +๋– ๋‹ค๋‹ˆ +๋„ฅ์ŠคํŠธ +๊ธฐ๋‚˜๊ธด +์น ์„ฑ +##RE +๋ถ™์—ฌ์„œ +๋•์ˆ˜๊ถ +๊ฒฝ๋‚ด +๊ณจ๋ชฐ +๊ณ ์ณ์•ผ +์™•๋ฆ‰ +๊ณผ์† +๊ทผ๋ฐฉ +์–ผ๋–จ +ํ™ˆํ”ผ +๋ณ„์ผ +์กฐ์…‰ +##์ขŒ์™• +##๊ฐ€๋ฐฉ +ํˆฌํ˜ผ +๊ธ‰์‹๋น„ +๊ฒฝ์™ธ +์šฐ์™•์ขŒ์™• +๋ฐฉ๋ฏธ +##๋‚ด๋ฆฐ +๋ชฐ๋ ค์˜ค +##์žฅ์ด +ํ•œ๊ตญํ•™ +๊ตฐ๋Ÿ‰ +ํ”„๋ฆฐ์Šค +์—˜๋ ˆ +๋ฐ๋ท”์ž‘ +๊ฒฝํฌ +##์ฝœ๋ฆฌ +##ney +๋ฆฌ์ฐจ +##์œ ์—… +๋‚จ์ด +์›๊ฑฐ๋ฆฌ +๋ฐ”๊ฟ”์„œ +๋กœ์ด๋“œ +์ผ์„์ด์กฐ +๊ธˆ๊ด€ +๋‹ค๊ฐ๋„ +RA +๋ฆฌ๊ฑฐ +Hol +๋ฃจ์ด์ง€ +๋ถˆ๊ฐ์ฆ +์žฅ์œ  +์žฅ๋Ÿ‰ +์—์Šคํ†  +์œค์žฌ +231 +๋ถ์†ก +ํ”„๋กœํ…Œ +๊น€์ œ๋™ +Michael +๋ฏธํ†  +๋Š”์ง€๋ผ +๊ตฌ๋กœ๊ตฌ +๊ธฐ๋ฆ„์ง„ +๊ฒฌ์  +๊ธˆ๊ณผ +๊ธฐ๋ฅผ +์ „ํŽธ +์ฐจ๋ฅด +ํ•ด์ถฉ +ํ† ๋ก ์ž +๋ถ„ํŒŒ +๋‹ค๋ฅผ๊นŒ +์•„๋‹ค +Ben +๋ฐ€๋ ค๋‚ฌ +์• ์Šคํƒ€ +ํ˜ผ์žฌ +๋‘ฅ๋‘ฅ +๋ฌธ๋ฐฉ +์‹ฌ์ˆ  +์ค€์žฅ +๋น„์—”๋‚˜ +ํƒ€์ด๋ฒ ์ด +CV +Lo +๊ฐ€์ ธ์˜จ๋‹ค +์†๋Œ€ +์„๋ฅ˜ +๋ฆฌ๋งˆ +์ „๊ฐˆ +ํฅ๊ฒจ์šด +ํžˆํƒ€ +##bl +1850 +ํžˆ๋กœ์‹œ +๋‚ด์ƒ‰ +ํ™˜๊ฒฝ์ฒญ +์•ˆํ•จ +์„ฑ์•ˆ +ํ—ˆ์ค€ +ํ›”์ณค +์ž ์žฌ์šฐ +๋ฏธ๋ง +๋ฐœ๋ฒ„๋‘ฅ +์ˆ™๋ฉด +์ง‘๊ฒŒ +foodporn +์œ ์ž„ +๊ด‘๊ณ ๋น„ +ํŽ˜ํผ +##uel +ํˆฌํ‘œ์œจ +##๋ ˆ๋“œ +MMOR +์ž‘์•„์ง€ +์ฝ”๋ฆฌ +##ures +๋Šฅํ†ต +๋‹น๋ฝ +์˜ค๊ฑฐ +WTI +๋Œ€ํฅ +๋ถˆํ˜„๋“ฏ +๋ƒ‰๊ธฐ +์น˜ํ™˜ +ํŠธ๋ฆฌํŠธ +์ค‘๊ฒฌ์ˆ˜ +์ฝ”์ŠคํŠธ์ฝ” +##๋ฏผ์„ +์ทจ์žฌ์› +ํŒŒ์Šค์นผ +๋ถ€์ฃผ์„ +ํ•ด๊ณจ +์ด๋ค„๋‚ธ +๋‹ค์ผ€๋‹ค +์‚ผ์ผœ +ํ• ๋งค +ํƒํ•  +##๋ˆ๋‚˜ +๊ด‘์ฃผ๊ด‘์—ญ์‹œ +๋น„์•ผ +์˜์‚ฌ๊ด€ +๋ฏผ๊ฐ€ +์˜์ •์„œ +๊ฐœ๊ฐ• +์ˆ˜๋ ต +ํฌ๋‹ˆ +##uf +Paris +์•ˆ๋‹ฌ +ํ•œ๋‚จ๋™ +๊ฒฝ๊ณ„์‹ฌ +ํ™ฉ๊ธˆ๋น› +RT +She +ํŽธ์•ˆํžˆ +OO +๊ฐ์ด‰ +##์ž์‚ฐ +์–ด๋ ดํ’‹์ด +์—ญ๊ด€ +Out +๊ณ ๋œ +์ธ์–ด +๊ทน์น˜ +์• ๋ฒŒ๋ ˆ +ํ•„์  +์ง๊ถŒ์ƒ์ • +์œ„๋„ +์ œ๋ฅด +์ตœ์•  +##๋ฆฌ๋‹ˆ์ŠคํŠธ +์žฅํ•œ +์†ก์ง€ +์ฒœํƒœ +##์žฅ์„ฑ +๋ฐ”์ด์˜ฌ๋ฆฌ๋‹ˆ์ŠคํŠธ +์ƒ๋ณด +์„œ๊ฐ€ +์ •์ƒ‰ +ํ•˜๋™๊ตฐ +570 +๋ฌด๊ฒธ +๋‚จ์–‘์œ ์—… +์ง€์˜ +ํšจ๋ชจ +์ƒ๊ฐ๋‚œ๋‹ค +๋งํ–ˆ +๋ฌธ์•ˆ +์šฐ๋ ˆ +ํ™ฉ๋ฃก +##aum +๊ฑฐ๋ฆ„ +์ค€๋น„๋ฌผ +ํ• ๋ž˜ +##ํƒ„์˜ฌ +Fin +๋ถ๋น„ +์‚ฐ์‹ค +Ox +์ด๋กญ +์‹œํ˜„ +##rich +ํŠธ๋ ˆ์ด๋”ฉ +MMORPG +๊พธ๋ ธ +์–ป์–ด๋จน +์˜คํŠธ +##ํ•˜์‹œ +์กฐ๋˜ +##ํˆฌํ•ฉ +##๋‘˜๋ € +๊ธˆ๊ฐ’ +์ถœ๊ธˆ +์„ ๋‹จ +ํ‚ค์šด๋‹ค +๋…ธํ”„ +์†๊ผฝํžŒ๋‹ค +์ƒ์—ฐ +์• ๋‹น์ดˆ +์˜๊ธฐํˆฌํ•ฉ +Dem +์›์€ +์ตœ๋ฃจ +๋Ÿญํ‚ค +์ˆจ๊ธด +์ˆ˜์šฉ์ฒด +๊ผฌ๋ง‰ +๋…ธ๋™์ฒญ +๋จน๊ตฌ๋ฆ„ +ํŒŒ์‚ฌ +์žฅ์ƒ +๋…ธ์—ฌ +์™ธ์›Œ +์ •๊ฐ +๋ณตํ†ต +##์ฟ ์   +์ฒ˜๋‹จ +##๊ฐœํ†  +##๋ฒ ๋‹ˆ์•„ +์ƒ๋Œ€๊ตญ +๋ช…์„ฑํ™ฉํ›„ +๋ฐ•์ง„์˜ +RNA +์—์ด์ง€ +์กฐ์„ ์กฐ +๋ฐฑ์ข…์› +ํฌ๋ธŒ์Šค +Gener +ํŒ๊ถŒ +##๊ตฌ๋ฅด +๋ถˆ์พŒ๊ฐ +##์ฐจ๋ณ„ +ํ•œํ… +NSC +์ฒด์žฅ +##๋ฐฉ์•„ 
+๋‘ํ… +์˜ฌ๋ž์œผ๋ฉฐ +๋น ์ ธ๋‚˜๊ฐ„ +ํŠน๋ก€๋ฒ• +์†Œ์‚ฐ +๊ด‘๊ฐœํ†  +์—ญ์ž +์ฝ”ํ”ผ +##์ถ•๊ตฌํ˜‘ํšŒ +๋‚ด๋ฏธ +๋งˆ์ˆ˜ +๊ฑฐ๋ฌธ๊ณ  +ํšŒ์‹ฌ +##ํˆฌ์–ด +์ฃผ์•ˆ์  +์•„๊ณผ +๋ณด๋ฆ„๋‹ฌ +๊ฒธํ•œ +ํ‰์•… +์†ก๊ด‘ +์žฅ์„ +์–ด์‚ฌ +ํ‹ดํŠธ +์™•์‹ญ๋ฆฌ +ํ•ฉ์˜์„œ +264 +๋‹ค๊ฐ„ +๋‹ค์ด์–ผ +์ง€๋ฏผ +##์›์ˆญ์ด +##rib +์•Œ์นด +๊ณ ์‚ฌํ•˜๊ณ  +๋ฏธ๋ฃฐ +##oe +์ „ํ™˜๊ธฐ +๋ฏธ์—ญ๊ตญ +##๊ณ ์Šค +๊ฒฝ์˜์„  +์‹œ๋…ธ +๋ฌดํ—ˆ๊ฐ€ +ํ˜๋Ÿฌ๋“ค +๋‚˜์นจ๋ฐ˜ +์†๋ง› +ํˆฌ์ง€ +๋ชจ๋ฐ”์ผ๊ฒŒ์ž„ +๊ตด์ง€ +๋Œ€ํ•œ์ถ•๊ตฌํ˜‘ํšŒ +์˜ค๋ฌผ +๊ด‘๋ฌด +๋ž˜๋ฆฌ +์•ˆ๋‚ดํŒ +์Šฌ๋กœ๋ฒ ๋‹ˆ์•„ +##kin +๋‹ฌ๋ผ์ ธ +Hen +์ƒ์ฃผ์‹œ +์•„ํœด +๋‚˜๋นด +์„์ง€๋ผ๋„ +๋ฌด๊ฐ€ +์ฃ„์˜์‹ +##๊ฑฐ๋ฆด +์–‘์Šนํƒœ +์–ผํฐ +์•Œ์•„์ฑ„ +์œˆํ„ฐ +์€์–ด +๋งค์ˆ˜์ธ +##LC +๋Œ€๊ตฌ์€ํ–‰ +##ule +์—์Šคํ† ๋‹ˆ์•„ +๋Š๋ฆฟ +๋…ผ๋ฆฌํ•™ +๊ฒฐ์˜๋ฌธ +ํ”„๋ฆฐ์Šคํ„ด +์žฅ์‹œ +ํ†ฑ๋‹ˆ +์•Œ์นผ๋ฆฌ +์ฒด๊ณ  +์ฆ๊ฐ +๊ฒฉ๋ถ„ +๋ฌด์ •๋ถ€ +์„ ์Šน +์šด์ „๋Œ€ +ํŒŒ๋ผ๊ณผ์ด +๋ถ€์ฐจ +๊ฐˆ๋ฆฌ์•„ +๋ฐฉ๊ธฐ +##์žฅ์–ด +์‹ธ์—ฌ +##oman +๊ฒฝ์†” +์„ธ์† +์กด์น˜ +๋’ท๋ถ€๋ถ„ +์ˆ˜ํฌ +๋Œ€์กฐํ‘œ +์„๋ถ€ +์ผ๊ด‘ +์ฆ์† +Adv +์š”๋™์น˜ +๋ฃจํ”ผ +ํด๋Ÿฌ์น˜ +์ „์•ผ +ํ•ด์กฐ๋ฅ˜ +##ura +์•ˆ์‹œ +๊ธ€๋Ÿฌ๋ธŒ +๋ฆฌ์› +๊ตณ์–ด์ง€ +ํ—Œ๋‚ฉ +๊ฐ‘์‹œ๋‹ค +๋‹ค์›€ +์Šค๋กœ +315 +์—ฐ์„ค๋ฌธ +๋‹ค๋ฐฉ๋ฉด +##๋ถ€์‚ฐ +๋ฏธ๋„๋Ÿฌ์ ธ +๊ทผํ™ฉ +##ํ”Œ๋กœ +๊ฑธ๋ ˆ +์ถœ๋‘ +##์ฒ˜์น˜ +๋…๋ฐฑ +ํ™€๋ชธ +์Šนํ•ฉ +reg +์ฃผ๋ชฝ +์‹ ์นธ์„ผ +๋ฐ•์ˆœ +์ž„๋งˆ +๋•์„ฑ +๋ฐ˜๋ฐ”์ง€ +๋ฐฐํšŒ +์‹ฌ๋ฆฌ์ „ +์ƒ‰์†Œํฐ +love +๋‹น๊ท€ +์ €๊ธˆ +ใ†์žฅ +๊ณ ๊ธฐ์•• +๋ฐ”๋น„ํ +๋ถ€๊ด€ +์–ด์ • +๊ธ‰์‚ฌ +๋ด‰์ œ +๋ถˆํ•ฉ๊ฒฉ +์œ„๋‹ˆ +์˜์ ‘ +๋‚ญ์†ก +์ฒด๋Œ€ +##์Šค๋ฆ„ +๊ณ ํ˜ˆ์•• +์Šค๋‚ต +##์ €๋ž˜ +๋ชจ๊ธฐ์—… +##๋ฐ”์•ผ +์˜์ƒ +##์—๋„ˆ์ง€ +ํ•ด์šด๋Œ€๊ตฌ +๋”์ƒต +ํ•œ๊ธธ +##67 +ํ™ฉ๋„ +์—ฐ๊ธˆ์ˆ  +์—ฌํฌ +์ด์•„ +ํžˆ์Šคํ…Œ +##๊ณ„์„  +๊ธฐ๋‹ˆ +์ฝ”์Šคํƒ€๋ฆฌ์นด +์• ์Š๋ฆฌ +๋ฐฐ๋ถˆ +์†ก์œ  +##ํŒŒํ‹ฐ +##์—์ด๊ณ  +๋’ค๋Œ์•„๋ณด +Christian +๋ฑƒ๊ธธ +##๋””ํ‘ธ์Šค +##ํŽ˜์ŠคํŠธ +๋’ท๋‚  +์ˆ™์‹ +๊ณ„๋ชฝ์ฃผ์˜ +๋’ค๋กœ +์ถ”๋Œ +ํ—ฌ์‹ฑ +ํŠน์งˆ +์‚๊ฑฑ +์†ก์ค‘๊ธฐ +##์ŠคํŠธ๋ฆฌ +๋ˆˆ์น˜์ฑ„ +์ƒŒ๋””์—์ด๊ณ  +์ „์‚ฌ์ž +์ถœ๋ชฐ +์ฝ”๋‚œ +์‹คํƒ„ +์˜ค์ด๋””ํ‘ธ์Šค +##์›Œํ”„ +๋กœ๋นˆ์Šจ +ํ™€๋Œ€ +์›€์ง์—ฌ์•ผ +๊ฒฉ์–ธ +์—์Šคํ‹ฐ +์ง•๊ฒ€๋‹ค๋ฆฌ +๋Œ€ํ•ฉ์‹ค +##63 +๋“ค์—ฌ์˜ค +๊ฑด๋‹ด +๋šซ๋ ค +๋ƒ๊ตฌ +๋„์‚ด +##gl +George +ํŒŒ์ปค +ํ™์„ฑํ” +instadaily +๊ธธ๋“ค์ด +์ˆ˜์‚ฐ์—… +๋ง›๊น” +##์„ ๋‹ค +์ •๋‘์–ธ +์œ ํ˜„ +##ํ”Œ๋žœ +##eauty +์ค„๋ฆฌ์—ฃ +๋Š๊ธด +ํ˜ธ๋ฏธ +์ถœ๋ฒ”์‹ +๊ฐ€๋ผ์•‰ํžˆ +UV +๋‚˜์˜ +์‹ฌ์ทจ +์กฐ๋‚˜ +์ง„๊ณผ +Association +๋‹ˆ์ฆ˜ +##๊ฒŒ์Šค +์ œ์‹œ์นด +์ผ๋‹จ๋ฝ +๊ฐ•์˜ +ํšŒ์„  +์ˆ˜๋ชฉ๋“œ๋ผ๋งˆ +##์–ผ๋นˆ +๋– ๋‚˜๊ฐ€ +ํฌ๋ฉ”๋ผ๋‹ˆ์•ˆ +##๋Ÿฌ์›Œ์„œ +์ตœ๊ทœ +๋งˆ์ง€๋ชปํ•ด +์„œ๋‘˜๋Ÿฌ์•ผ +์•‰ํ˜€ +ํ† ๋“œ +##์กฐ์‹ฌ +์ €์šธ์งˆ +์ผ๊ฑฐ๋ฆฌ +๊น€์ฒœ์‹œ +์‚ฌ๋ฌด์—˜ +์˜์žฅ๋‹จ +์ผ๋ ‰ํŠธ๋ฆญ +POS +ํฌ๋‚˜ +ํƒœ์–‘์—ด +๊ณต์†Œ์žฅ +์„ฑ๊ต +##๊ธ๊ธ +๋‹ค์–‘์ฒด +์˜ˆ๋ฐฐ๋‹น +๋…ธ์•ฝ +๋ง‰๊ตญ์ˆ˜ +๋ถ€๋‹ค +##ilm +์ „์ „๊ธ๊ธ +๊ณผํ•™๊ด€ +##CN +์ฒ™๋ฐ• +๋กฏ๋ฐ์นด๋“œ +์ข‹์•„ํ•ด +์œ ์—”๊ตฐ +๋‹น๋ฌด +์—ผ๋ถ„ +์ด๋ฒ ์ด +์ €์Šคํ‹ด +์˜๋ฌธํ•™ +๋“ค์œผ๋‹ˆ +์ง€์‚ฐ +์žฌ์ž‘ +๋ณธ์—… +๊ตฌํ•œ๋‹ค +ํ•œ์ˆ˜ +๋ฐฐ๊ณ ํ”ˆ +์‹ฌํฌ๋‹ˆ +##rateg +๊น€๊ฐ• +์ž‘ํ’ˆ์ƒ +์•Œ์•„๋ณธ +์–ด๋ ค์›Œ์งˆ +โ €โ €โ €โ €โ €โ €โ €โ €โ € +ํ…Œ๋“œ +will +๊ธฐ์ง€๊ฐœ +Frank +์ฑ„์›Œ์ ธ +ํœด๊ฒŒ์‹ค +์ผ๊ฒฉ +์—ฌํ–‰๊ธฐ +์ „์ดˆ +๊ฑธ์Šค +๋‘”์น˜ +๋„์ด +ํŒจํŠธ๋ฆญ +๋‚™์ฐฐ์ž +Sal +##68 +์ ๋ ธ +๊ฒฝํ—˜๋‹ด +์ง€์ผœ์˜จ +ํ์ฐจ +##๋„ค์ฆˆ +##์ŠคํŠธ๋Ÿญ +์˜ˆ์ง€ +ํฌ๋ฌธ +๋…ธ์ • +##์ข…ํ•ฉ +ํ•˜ํ†  +๋‹น์‚ฐ +์š”๊ฒŒ +์ด์‚ฌ๋ น๊ด€ +์งญ์งค +PH +๋…ธ์‹ฌ +๋ถ™์ธ๋‹ค +์˜ˆ๋ปค +ํœด๋จธ๋‹ˆ์ฆ˜ +Japan +๋Ÿญ๋น„ +๋น„์˜ฅ +์„ค์•… +์ฒซ์„  +์ดํ•œ๊ตฌ +ํฌ๋ฆผ์น˜์ฆˆ +๋‹น์ฃผ +๊ฐœ์ฐฐ๊ตฌ +์Šน๋ถ€์ฒ˜ +๋ฐ€๋ฆผ +์—ฐ๊ฑฐ +ํ๊ด‘ +์ฐœ์งˆ๋ฐฉ +์•ˆ์ข…๋ฒ” +๊ฐ๋งˆ +ใ†์ƒ +๋Š˜์–ด์ง„ +์ข‹์•„ํ•  +๋Œ€ํƒ€ํ˜‘ +์ผ์šฉ์ง +Dream +์•ฝ์„œ +ํ˜œ์•ˆ +##ets +GA +์ˆ˜๋„์‚ฌ +๋›ฐ์–ด์˜ฌ๋ž +๋ณ€๋‘๋ฆฌ +##IV 
+๋ฒˆ์ฃผ +๋”ฐ์™” +๊ธ€๋กœ๋ธŒ +๋Œ์–ด์˜ฌ๋ ค +๋ฉ”๊ธฐ +์ผค๋ ˆ +๋ฐฑ๋‚จ์ค€ +๊ฐ์ถ˜ +##ํ˜ธ๋ž€ +๊ฝƒ๊ฐ€๋ฃจ +์–ด์œ  +์ง€๋ฐฉ์ฑ„ +์ „๊ธฐ๋™ +๋ฐ”์ด์ € +๊ธฐ๋ถ€์ž +๊ฝƒ๋ฐญ +##FL +Charles +๋ชจ๋“ฌ +ํ˜น์ž +##์ปฌ๋ ˆ์ดํ„ฐ +##๊ฑฐ๋ ค +##์šฉ์„ +HO +๋‹ด๊ทผ +ํ•˜์–ผ๋นˆ +##์น˜๋ฃŒ +์–ด๊ฐ€ +##๊ฐ์‚ฌ +Group +val +##ํ—ค์ด +๊ฐœ์ฒ™์ž +ํŒ์ฝ˜ +์„œ์œ  +##ense +์ด๋ฒ”ํ˜ธ +ํ‹€์–ด๋ง‰ +OCI +์˜ค๊ฐˆ +##์—์„œ +##๋‹ฌ๋ผ +##์žฅ์‚ฌ +Af +๋ถ€๊ต +ํ‡ด์—ญ +์ˆ˜์† +์•ผ์˜ +์˜ค๋ฅธํŒ” +๊ฒฌ์ธ์ฐจ +์†์‰ฌ +์•ŒํŽœ +์กฐ๋ชฉ์กฐ๋ชฉ +ํŒŒํ”„๋ฆฌ์นด +๋งˆ์šฐ +๋ฐ•๋ฏธ +์—์Šค์ปฌ๋ ˆ์ดํ„ฐ +๋ผ์šธ +##ํŠธ๋ผ์ดํŠธ +##์ฒด์œกํšŒ +##ush +๊ท“๊ฐ€ +์‚ฌํฌ +๊ฐˆ๋ž +๋‹ค๊ตฌ์š” +์ž…ํšŒ +ํ•ธ๋”” +##ํ™€๋ฆ„ +์ตœ์ผ +๋…ํ•™ +์ถ”์‹ฌ +๊ฐ–์€ +๋‚จ๋™๋ถ€ +์˜๊ฒฝ +##ao +##ari +510 +์ฒœ์  +์†์•„๊ท€ +๋ชฉ๋ จ +๋น—๋ฐฉ์šธ +์–ด์งˆ +๊ผญ๋‘ +์ˆœ์ต +์ฐจ๋ ค์ž… +์ขŒ์žฅ +##๋ฌผ์‚ด +##์ฟผํ„ฐ์Šค +์–ด๊ธด +Sw +์˜ˆ๋ฌผ +๋ƒ๋ฉด์€ +๊ธ‰๋ฌผ์‚ด +์•„๊ตฌ +##์Šค์œ™ +##ulti +Psych +์™ธ๋™ +์‹ ๊ณ ์ž +์‹ฌํ•ด์กŒ +์ตœ์น˜ +ํ‘ธ์กฐ +์ถ”์ฒœ์„œ +ํ™ฉ์ƒˆ +##๋ธŒ๋กœ +๋‚ด๋น„์น˜ +์ ์žฅ +224 +ํ™”๊ฐ•์•” +##๋Š๋Ÿฌ๋ฏธ +##์ธ ํ•˜์ด๋จธ +๋ถˆ์‘ +์ƒค๋ธŒ +ํ™˜๋ฉธ +##์ง€๊ธ‹ +๊ฐ€๋ ค์›€ +์ฐŒํ‘ธ๋ ธ +๋…ธ๋ฅธ์ž +ํ•ฉ๊ธˆ +๋ ˆ์ธ๋ณด +##ํด๋ผ +##ental +๊ฒฝ์šด +์•ผ๊ฒ ๋‹ค +SG +์†Œ์˜ +์ฒด๋กœ +ํ€˜์ŠคํŠธ +ํฅ๋• +##์–ด๊ฐ€ +345 +๋ชจํ‹ฐํ”„ +์˜๋กœ +๊ธฐํŠน +๋ฆฌํžˆ +๋งŒ๋งŒ์ฐฎ +์ง€๊ธ‹์ง€๊ธ‹ +๋„์น˜ +์ถ”์•™ +๋ถˆ๋Ÿฌ์•ผ +##une +์—ญ๋ชจ +ํ•œ์ž์–ด +๋งˆ๊ณ  +๋ฐ˜์ผ +ํ†ต์˜์‹œ +๋…์„ค +์ค€์œ„ +ํƒ•ํ‰ +์•Œ์ธ ํ•˜์ด๋จธ +ํ’์‚ฐ +ใ†ํ˜„ +๊ฐ€ํ†  +๋ฏผ๋‚ฏ +๋น›๋‚ด +์ƒํฌ๋ฆผ +์ง€๋ฐฐ์ธต +์กฐ์ง€ํ”„ +์œ ๋น„์ฟผํ„ฐ์Šค +์—ญํšจ๊ณผ +์œ„๋ฌธ +๋งน์ฃผ +์ž˜๋ผ์„œ +์„ญ๋ ต +๋ถ€๋‹จํžˆ +๋ฏผํ˜ +ํฌ๋‚˜ํฐ +์†ก์ˆ˜ +์•„ํ +์›๋กœ์› +๋ฐฉ์ „ +์†”์†” +์•ผํฌ +##๊ฒฝ๊ถ +์„ธ์ถœ +630 +๊ธˆ๋™ +๋น…์Šค +๊ธฐ๋ฆ„๊ธฐ +ํ™ํ•ฉ +680 +๋งˆํ‚ค +๋ง‰์—ฐํžˆ +ํƒ€๋ฅดํŠธ +๋ฐœ์ฃผ์ž +๋ˆˆ์—ฌ๊ฒจ๋ณผ +๊ต์šฐ +์ง„๋‹จ์„œ +๊ฑด์กฐ๊ธฐ +๋‚˜์  +์›”๋งˆํŠธ +๋…ผํ˜„๋™ +๊ฑธํ„ฐ์•‰ +๊ธฐ๋ฅธ +์–ดํ”Œ +์ทจํ•ฉ +์ฐพ์•„์™€์„œ +##์Šคํƒ€์ธ +์„ ๋•์—ฌ์™• +์‹œํ‚จ๋‹ค๊ณ  +##ago +๋ฌธ๊ด€ +์ดˆ๊ณ„ +์†Œ๋ฐฉ๋ณธ๋ถ€ +ํ•ด์ธ์‚ฌ +๋Œ€๋‡Œ +##ember +ํ•จ๊ฒฝ๋„ +๊ฒŒ์ด์ง€ +๋กœ๋ฏธ +์ €๊ณ ๋ฆฌ +์„œ๋ฆฐ +์ดˆํŒ +ํ”ผ์šด +ํ™”์ • +๊ตฌ์กฐ์ฃผ์˜ +์ˆ˜๊ฐ•๋ฃŒ +๋‘๊บผ๋น„ +์ด๊ฐ€ +๊ธฐํ†ต +๋‹ˆํ‚ค +๋ถ“๋‹ค +๋ฐฐ๋ณ€ +๋น™๊ธ‹ +ํ•™์Šต์ง€ +๊ฐˆ๊ธฐ +์ˆœ์ง +ํ‹ฐ์ฝ” +๋ฌด์ž„ +ํ„ธ์ฉ +๋ช…๋ น์–ด +์ถ”์žฅ +ํ•ฉ์„ธ +med +์˜์† +ํ”ผ๋‚œ์ฒ˜ +๋ฏธํฌ +๋ฒผ๋ฅด +ํŽธ์ง‘๊ตญ์žฅ +๋‚จ์„œ๋ถ€ +๊ฒฝ์ฐฐ๋Œ€ +HE +๋“ ๋‹ค๋Š” +๋ฐฐ์˜ +ํ•™์‹ +ํ˜•์กฐ +์‹์Œ๋ฃŒ +์ง€๋ก  +##์ฒœ๋ฉด +219 +mark +์ˆ˜๊ฐ‘ +##ํƒ„๋‘ +๋Œ๋ฆฐ๋‹ค +9900 +์กฐ๋ฐ”์‹ฌ +์ด๋„ +์žฌ์ฒญ +์—ฌ์ƒ +์ธก์ •๊ธฐ +์•Œ๋กœ์— +๋– ๋‚ด๋ ค +ํ•ด์„œ๋ผ๋„ +๋กœ์‚ฌ +์‚ด๋ฉฐ์‹œ +##39 +ํ™•์„ฑ๊ธฐ +##61 +๋‚œ์†Œ +์ €์† +์˜ค๋‹ต +์ „๋ฆฌ +##๋น„์˜ค +๋ฐฉ๋ถ€ +ํฐ์† +iOS +##์‹œํƒ€ +์šฐ์‚ฌ +##ํƒ€์นด +์„œ์ผ๋ณธ +##๋ชจ๋”๋‹ˆ์ฆ˜ +๋˜์ƒˆ๊ฒจ +ํ˜œ์› +์„๋ž˜ +234 +์ „๋ฐ˜์ „ +์•ˆ๋…•ํžˆ +๊ฒฝ์žฅ +์•„์žฌ +์œก์‚ฌ +ํƒ€์•… +์ฐฝ๋•๊ถ +๋›ฐ์–ด๋“œ +##๋น„์—” +ํ˜ธ์ด +AN +๋‹ฌ๋งˆ +๋›ด๋‹ค +##๊ธฐํš +๊ณ ์—ฌ +๋””์—  +์กฐ๋ชจ +๋ฒ ๊ฐ€ +์ –์†Œ +##๋„๊ทธ +๊ณต๋ฌธ์„œ +๋™๋‹น +์‚ฌ์ง„๊ฐ€ +๋งคํŠธ๋ฆญ์Šค +1893 +##์ถœํ˜ˆ +๊ณ„๋ชจ +๋‚˜๋ฌด๋ผ +ํƒ„์›์„œ +๋ฌธ์ง€๋ฅด +์‚ฌ์‹ค์ฃผ์˜ +ํ˜๋Ÿฌ๋‚ด๋ ธ +๊ฐ์ถœ +๋…์ฐจ +BIS +์™”์œผ๋‹ˆ +๋ฐฐ๊ธ‰์‚ฌ +UR +๋ผ๋ฐ” +์ฒœ๋ฏผ +์„ฑ์ธ๋ณ‘ +๊ฒ์ง€ +๋’ท๋ฐ”๋ผ์ง€ +์ข…๊ธˆ +ํ•ด์ค€ +##๋ฐ•์ž +๋ถ€์ด์žฌ +๊ผฌ์ด +๋Œ€์•ผ +์„ผ์นด์ฟ  +๊ฒฝ๋ฝ +##๋“ค๋Ÿฌ +##ํ˜œ์˜ +์‹ญ์ผ +๋‹จํ˜ธํžˆ +๋น„ํ•ฉ +##62 +๋“œ๋ผ๋งˆํ‹ฑ +๋ฐ˜์› +์•Œ๋žŒ +cy +ํƒฑ๊ณ  +์•ค๋“œ๋ฅ˜ +๋…ธ๋ฆด +์ƒ๋ชจ +##์†Œ์‹ฑ +์ด๋ค„์ง€๊ธฐ +๊ตญ์กฐ +์ œ์ œ +์—ฌ์„ฏ์งธ +ํŒŒํ—ค์ณ +ํ”Œ๋ž˜์นด๋“œ +์•„๋ถ€์ง€ +Ger +์ด์ƒ๋“ +๊ฟˆ์† +์ง„์ทจ +๋ฏธ๋Œ€ +##ํ‹ฐ๋„จํƒˆ +์•™๊ธˆ +ํ˜ธ๊ตฌ +์ค‘์‹ฌ๊ฐ€ +๋ถ€๊ฐ +์ง‘์–ด๋“ค +์˜ฅ์ˆ˜ +ํŒป๋ง +์ตœ๋ฌธ์ˆœ +๊ธˆ๊ด‘ +์ž๋ฅธ +์นœ๋ฌธ +์ตœ๊ณ ๊ฐ€ +๋Œ€์ถœ์ž +๋‚˜๋น ์กŒ +๋ฌธ๋ณ‘ +ํ”„๋ผ์ด๋“œ +์‰ฌ์šฐ +์ˆ˜์„๋Œ€ํ‘œ +๋ฏธ๋‚˜๋ฏธ 
+์ž์šฑ +์ƒ๋ฌด๋ถ€ +๋…ผ์ œ +๋†€๋ฆผ +๋ฒ”ํผ +๋ฐฑํ•ฉ +##์ƒค๋ธŒ +์ž๋ฃŒ์ง‘ +์ „ํ˜„ +์ž„์ง„๊ฐ• +์˜์šฉ +์ง„์›์ง€ +FRB +์ฃผํ˜„ +์นผ๋ฅจ +ํ†ต์กฐ๋ฆผ +๊ด€๋ฆฌ์› +์‚ฌ๋ฌด์ง +WP +๋ฒ„๋ ค์„œ +ํฌ๊ต +๋ฌดํ•œํžˆ +๋ชฉ์ƒ +์‡ ๋ฝ +ํ™์ˆœ +##์ฝ˜์„œํŠธ +์•„์›ƒ์†Œ์‹ฑ +Met +์ง€์„ +์–ดํ”Œ๋ฆฌ์ผ€์ด์…˜ +๋”ฐ๋” +ํ—ˆํ™ฉ +๋ฉฑ์‚ด +์ž์•„๋‚ธ๋‹ค +์ŠคํŒŒ์ด๋”๋งจ +ํ•„๋Ÿฌ +๋“ค์ฉ์ด +๋ฐ˜์•ผ +295 +์ขŒ์ ˆ๊ฐ +๊ณผ์‹ +๋ฒ ๋ฒ„ +ํ—›์†Œ๋ฆฌ +๋™์Šน +์œ„๊ตฌ๋ฅด +ํŒจํ•ด +ํ˜ธ๋ช… +๋™๊ทธ๋ผ +์Šˆํƒ€์ธ +์ถ•ํ˜‘ +ํ™”์Œ +์ด์น˜๋กœ +๋กœ๊ณ ์Šค +์˜ˆํ•˜ +ํ—ˆ์ƒ +ํ”„๋ž€์ธ  +๋งค๋ณต +๋ถˆ๋˜ฅ +์œค์ฐฝ +๋Œ€๋ฐฉ +๋ฏธ๊ฐ +๋ฐฉํ’ +๋Œ์•„๋ณผ +์†Œ๋ฆผ +ARS +์กฐ์ค‘๋™ +ํํšŒ +##๋ฒ„๋ฆ‡ +##์ƒ๊ฐ +์‹ฌ์˜์œ„ +ํฌ์ŠคํŠธ๋ชจ๋”๋‹ˆ์ฆ˜ +ใ†๊ฒฝ์ œ +์•จ๋ฆฌ +##๋‹ค๋…€ +##์ธ ํฌ +๊ฒ€์ˆ  +์šด๋ฌธ +๋งˆํ…Œ +304 +๋‹น๊ทœ +ํ–ฅ์„œ +##angu +์ „ํšก +์ง€๋ž„ +ํ™์ • +๊ณ ์Œ +๊ธฐ์•” +์น˜๋„ +๋‹ดํŒ +๋ฌด์ • +์œ ๋ณ„ +๋‹ต๋ก€ +๋‘˜๋Ÿฌ๋ณธ +๋ฏธ๋„๋Ÿผ +์ฝ˜์†” +##itz +##urt +##here +##๋‹ค์ด๋ผ +๊ตฌ์—ฐ +์ €๊ฑธ +์ฟ ํŒก +sec +ํ„ฐ์Šค +##์šฐ๋šฑ +๋“ค์–ด์˜ฌ๋ ธ +##์ธ์ˆ˜ +China +##64 +ํŒŒ๋…ธ๋ผ๋งˆ +ํˆฌ์ฒ  +์ตœํฌ์„ญ +ใ†์ฃผ +ํŒŸ์บ์ŠคํŠธ +LCC +##ading +๋ฌผ๋Ÿฌ๋‚  +์ดˆํ†  +์•„ํƒœ +1888 +ํŒŒ๊ณ ๋“œ +๋Œ๋ ค๊ฐ” +๊ฐ‘์˜ค +์ฃผ๊ฑฑ +์”Œ์šฐ +๋ฒ•๋‹น +ํ๋ฆฐ +์—์–ด๋ถ€์‚ฐ +์ฒด์ธ์ง€์—… +ํ–‰๋ณต๊ฐ +his +##ํ˜ˆ๊ด€ +์™•์ขŒ +์ €์ธต +๋ฐœ์‚ฌ๋Œ€ +##OU +์นผ๋Ÿผ๋‹ˆ์ŠคํŠธ +๋ธŒ๋ฆญ์Šค +์‚ฌํˆฌ +##์˜ˆ์ง„ +ํ•œ๋‹จ๋‹ค +์ตœ๋‹จ +##์–ด์Œ +##์‚ฌ์œ„ +UCC +๋•๋‹ด +๋ณ‘์‹  +์ˆ ๋ณ‘ +ํšŒ๋‹น +์œ ์ „์ฒด +๋‹จ์—ด +๋‹จ์˜ค +์—…์žฅ +##์ง€๋ถ• +##์นดํƒ€ +๋“œ๋‚˜๋“œ +์ฒญ๊ตฌ์„œ +์Šคํ†กํ™€๋ฆ„ +์‚ฌ๋ณ„ +##ํƒ€ํŽ˜ +##igital +ํ˜„์˜ +ํฅ์„  +์˜ค๋กœ๋ผ +๋‹จ์ „ +๋ฌด๋‚œํžˆ +์ฒญํ˜ผ +ํ–ฅ์‘ +๋˜์Šค +๊ฒฉ๋™ +๊ธฐ๋Ÿฌ๊ธฐ +์Šคํƒฌ +์Šนํ•˜ +##์ €๋งŒ +๋“คํ†ต +๊ด€๊ตฐ +ํ† ๋ชจ +ํŽธ๋ฐฑ +##๋งˆ๋ฅดํฌ +์‚ด์•„๋‚ฌ +๋–จ๊ตฌ +๋ถ€๋Œ€์› +๋ถ•๋Œ€ +##๊ณ ๋ฅด +์˜ค์Šคํ‹ด +๋„ํ†ต +๋ชธ๊ธธ์ด +๋ถ„๊ด‘ +๋ฐ”๋‹ท์† +##irect +์ˆ˜๋ฆฌ๋น„ +๊ณค๋‘๋ฐ•์งˆ +##์ŠคํŠธ๋Ÿญ์ฒ˜ +๊ฐ„์ด +๋งŒ๋„ +๋ชจ์šฉ +East +์ถœ์  +Free +๋นŒ๋ ธ +์–ด์šธ๋ ธ +๋ฐœ์น˜ +Pat +๊ฒฝ์ด +์™€์‚ฌ +๋งค๋ฆฌ +์„œํˆฐ +์ŠคํŒ… +์š”์ ˆ +1892 +์—ฌ๋ช… +์œ ์ด +1886 +์กฑํžˆ +##์ดˆํ”„ +Organ +๋นจ๋Œ€ +253 +์˜ˆ๊ธˆ์ž +์œ ํ™ฉ +์‹ ์•ˆ๊ตฐ +๋ผ๋“œ +์ผ์ง +๋ณด์‚ดํ•Œ +์„œ๋ก  +์œค์ง„ +๋ฃจ์นด์Šค +๋Œ€ํ•œ์ƒ์˜ +๊ฑฐ๋ถ์ด +ํŽด๋‚ด +์š”๊ดด +๋งˆ์Šคํ„ฐํ”Œ๋žœ +๋น„๋‰ด +๋ฌผ๋ ˆ +๋ฌผ๋ฆฌ์ณค +์•„๋…œ +์ƒ์‹ค๊ฐ +๊ณต๊ฐˆ +๋“€์—ฃ +์ฐจ๋กœ +๋…ผ๋ฐญ +์œผ๋‹ˆ๊น +Cambridge +์‚ฐ๋ฐœ +์…€๋ ˆ +์˜์—ฐ +๋…ธ๋žซ๋ง +Elect +์†Œ์ž„ +์น ์›” +์‰ฌ์šธ +์ตํž +๋‘๊ทผ๋‘๊ทผ +Institute +##์™•์‚ฐ +ํƒ€๋ฐ• +์ด์—์•ผ์Šค +์ตœ์ง€ +๋™ํƒ +์ˆ˜๋ฅ˜ +์ง€๊ฒน +์ถ”์•… +##์ง€์Šค +๊ฐœ๊ฒฝ +375 +๋„์ทจ +์‚ญ๋ฐœ +๋ถˆ๋ˆ +์œ ๋ช…๋ฌด์‹ค +ํ™”์ฃผ +๋ณดํ†ต์ฃผ +๋…์ฐจ์ง€ +์•„์›… +์ด๋งŒ์ €๋งŒ +์˜ตํ‹ฐ๋จธ์Šค +##์• ์•  +๋ฉดํ—ˆ์ฆ +์š”๊ฒƒ +๊ฑฐ๋ถ์„  +##ใ…œใ…œใ…œใ…œ +ํžˆ์‚ฌ +ํ†ต๊ณ„ํ•™ +์ƒค๋ธŒ์ƒค๋ธŒ +์‚ฐ์‹œ +##๋ฐฉ๋ง์ด +์•„์ด์   +์‹œ๊ธ‰ํžˆ +##์œตํ•ฉ +์ƒ๋Ÿฌ๋ฆฌ๋งจ +๋…ธ๋งˆ +์ง๋ถ„ +๋Œ€๋‹ˆ์–ผ +๋งˆ๋” +๊ณจ๋™ +ํŒŒ์Šคํ…” +๊ฐ„์ฒ™ +์‡ผํŒฝ +ํŽ˜์ŠคํŠธ +ํฌ์ธ  +ํ—๋–ก +์ธํ”„๋ผ์ŠคํŠธ๋Ÿญ์ฒ˜ +๊ณ ๋™ +์—ฌ๋น„ +์ถ”๋ฌธ +์ง€์นจ์„œ +๋‹ค๋„ +##๋ฌธํ•™ +์„ผ์„œ์Šค +##ype +์ด๋ค„์ง€๋ฉด +๊น€๊ฑด +๋น„๋‹ˆ +์˜์ฒœ์‹œ +ํŒจ์…” +1848 +์ฝœ๋ผ๋ณด๋ ˆ์ด์…˜ +์˜จ๋ˆ„๋ฆฌ +๊ด‘์—ญ์ž์น˜๋‹จ์ฒด +์žํฌ +์ฒ˜ํ•  +๋†”์•ผ +๋นŒ๋ฆด +๋ˆ„๋Ÿฐ +KR +๊ณ ์‹œ์› +๋‚˜์Šค๋‹ฅ +State +๊ฑด์ถ•์‚ฌ +๋‚ ์•„๊ฐˆ +์™ธ๋ฌด๋ถ€ +์„๊ฐ€๋ชจ๋‹ˆ +์•„์„ฑ +๋น…ํ„ฐ +์ข…๊ณผ +##๊ณต๊ฐ„ +์“ฐ๋ผ +##์•„์ฐŒ +๋งž๋ถˆ +์‚ฌ๋กœ์žกํžŒ +์ค‘๋ก  +์ฒ˜๊ฐ€ +์ˆ˜ํ–‰์› +์†์‰ฌ์šด +๊ธ‰์ž‘ +๋“œ๋กญ +๊ต๋ฌด +๊ธˆ์š• +##๋ƒ…๋‹ˆ๋‹ค +์ŠคํŠธ๋ ˆ์ดํŠธ +์—ฐ๊ฑฐํ‘ธ +๋„˜์นœ๋‹ค +๊ณ ์†์ฒ ๋„ +์žฅ์‹ ๊ตฌ +Peter +๋ฆฐ๋‹ค +ํ“จ์–ด +##์ค˜์•ผ +์ˆ˜์ฑ„ +์ฟจ๋ง +##PN +๊นŒ๋งˆ๋“ +๋ฃจ๋Œํ”„ +์ค‘ํ™˜์ž์‹ค +์‚ฌ์ฟ ๋ผ +ํ˜ธ๋“ค +๋ฉ”์ดํ”Œ +ํด๋ ˆ์–ด +Asia +์‚ฌ์˜ค +ํŒ์ž +##์น˜์šฐ +๋Œ€ํ•œ์ฒด์œกํšŒ +์„ ํƒ์ง€ +๊ตฌ๊ตญ +์˜ค๋ฌ˜ +๊ฐ๊ฐ€์ƒ๊ฐ +๊น€์ง„ํƒœ 
+##๋‹ค๋‹Œ๋‹ค +๊นŒ์น  +๊น€ํ•˜๋Š˜ +์•Œ๋ก +Bre +๊ฒฉ์ • +์žฌ์šฐ +์ง€๋ณ‘ +์„ ๊ณก +ํ•œํ•ด์„œ +์™€์ด์ฆˆ +๋„ˆ๋„ +์น˜๋ฃจ +##ros +๊ธฐ๊ทผ +๋–จ์–ด๋œจ๋ ค +์ฐพ์•„๋ต™ +๋ฎค์ง€์—„ +๊ต๋ณด๋ฌธ๊ณ  +์ƒค์ธ +์˜ค์žฅ +๋‹น๊ตฌ +๋ฐ‘๋ฐ”ํƒ• +๋ถˆ๋Ÿฌ๋„ +๊ฐœํ•™ +๋„ํ•‘ +์ƒํ™”ํ•™ +##๋ž˜๊ธฐ +์ง‘์‚ฌ๋žŒ +Company +์œค์ฃผ +ํ•˜ํ’ˆ +ํ˜„์„ธ +๋ถ„์ ˆ +๋™์˜๋ณด๊ฐ +์ด์šฉํ•ด +KIS +์•„์”จ +๋ถ€์‚ฌ๊ด€ +์›๋ชฉ +์ŠคํŽœ +์œก์ฆ™ +์–ธ์„œ +๋Œ€ํ•ญ์ „ +์•Œ๋ ‰์‚ฐ๋“œ๋กœ์Šค +๋ฐœ๋ Œ์‹œ์•„ +์ทจ๋ฆฌํžˆ +##ํŒฌ์ง€ +์ •๋ก  +์˜ˆ์ˆ˜ํšŒ +๋†๋ฏผ๊ตฐ +์šฐ์—ด +๋ฒ…์ฐจ +๊ฒฝ๋ถ€์„  +๋งˆํ‹ฐ์Šค +##์•Œ๋ผ +๊ฒŒ์ด๋จธ +๋‹ค์ด๋‚ด๋ฏน +Ear +AV +์‹œ์˜ +์‹ค๋ž‘์ด +##๋„ŒํŠธ +์ขŒํšŒ์ „ +๊ทธ๋Ÿฌ๋‹ˆ๊น +์ปต์Šค +##์˜ค๋ž˜ +๋ฐ๋ ค์™” +์ƒ์•”๋™ +##์†Œ๋‹‰ +์ œ์‚ฌ์žฅ +๋’ท๊ฑธ์Œ์งˆ +๋น„์Šค๋“ฌํžˆ +๋™๋ฐฉ์‹ ๊ธฐ +๊ธ๊นŒ +์…”์š” +๊ฐ€์ฃผ +ํƒฑํƒฑ +์ „๊ทผ๋Œ€ +##ํ• ๋ฆฐ +๊ฐ–์ถฐ์ง„ +Gold +์Œ€๊ฐ’ +##์ ˆ๋ฏธ +244 +์‹ ์ง„๋Œ€์‚ฌ +EPL +์—ฅ๊ฒ” +##EG +๊ณ ๋‚œ๋„ +๊ฒจ๋ ˆ +์• ์•  +์ธ๋ฉธ +##vir +##๋ถˆํ‰ +##ํ‰๋ถˆํ‰ +์„€๋„์šฐ +fac +ํ–‰์ฐจ +ํ™ฉ๋ฌด์ง€ +๋งŒํ™”๊ฐ€ +๊ฐธ์šฐ๋šฑ +๋งํ†  +์ž‰์—ฌ๊ธˆ +๋“์–ด +์„ธ๊ฐ€ +๋‹ฌ์„œ +๋’ค๋–จ +์‚ดํ•„ +์‚ด์ธ์ž +ํ—ˆ๊ฒ +์ด๊ฒฝ +ํ”Œ๋ ˆ์ดํŠธ +Del +๊ณจ์žฌ +ํ† ๊ฑด +##์šด์˜ +๋งˆ์ดˆ +๋งˆ๋ˆ๋‚˜ +๊ณผ๋ฐ€ +์šธํ‰๋ถˆํ‰ +์ฒ ์กฐ +MR +๋นผ๋จน +##๋‚˜์—ฐ +##๋‚˜๋ฏธ +๋‹ค๋…€๊ฐ” +๋Œ์–ด์˜ฌ๋ฆฐ +์ซ“๊ฒจ๋‚œ +ํ™œ๊ธฐ์ฐจ +๊ธฐ์Šค +##์Šฌ๊ธฐ +์ „ํˆฌ๋ ฅ +์„œ์ฃผ +ใ†๊ฒฝ +๋‚จํ–ฅ +๊ดดํ•œ +์•Œ๊ฐฑ์ด +๋น„ํ†ต +์Œ๊บผํ’€ +ํด๋” +์ฃผ์ข… +๋ฐ๋ฅด +##๋…ธ๋ฆฌ +ใ†ํ•˜ +๋„ˆ๊ตฌ๋ฆฌ +๋”ฐ๋‹˜ +์นจํŒฌ์ง€ +ํ•จ์„  +๋งˆ๊ตฌ์žก์ด +์ฐฉ์‹ค +Technology +์ˆœ์ฐฝ +์•ˆ๋‚ด์„œ +๋‚จ์ƒ +ํ‘œํ•˜ +ํ•˜๊ฐ +๋‹ˆ์ฝœ๋ผ์Šค +##์Šคํ…Œ์ด +์ฒญ์ง„ +Mil +์ง€์ฐธ +ํฌ๋ฆฌ์Šคํ„ธ +์žฅ๋™๊ฑด +๊ฐ์‚ฌ +์‹ญ์ด +์•„์›ƒ๋ › +๋ฐ›์•„๋“ค์—ฌ์กŒ +์•„๋ฐ +๋ง‘์Šค +##๊ฒŒํŠธ +##ini +๊ฒŒ๋†ˆ +์ž์—ฐ์‚ฌ +์บ ๋ฒจ +##ํ˜€์„œ +์–ด๋ฃจ๋งŒ์ง€ +๊ถŒ์„ฑ +##ka +##์ฒœํ›„ +์†กํ˜œ +West +์˜ฌ์ง€ +์ดŒ๋ฝ +๋“œ๋ ˆ์‹ฑ +๋™๊ทธ๋ผ๋ฏธ +๋žœ๋ค +์•„๋ฆฐ +๋ฝ€๋กœ๋กœ +์‚ฌ๋ง๋ฅ  +๊ณค์š• +๋‹ค๋น„๋“œ +์Šฌ๋ฉฐ์‹œ +๊ถŒ๋Šฅ +##๊ณ„๋ฆฌ +๊ตฌ์™€ +์ง€๋ ์ด +๊ทน์žฅํŒ +์‚ฌ์Šค +์ˆจ๊ฒผ +์ง„๊ตฌ +ํ™˜๊ฐ‘ +์„ธ๋ผ +์น˜์šฐ์น˜ +##๋‹น์„  +##๋ฒ„๋ฅธ +stud +๋ง์„ค์˜€ +์‹œํผ +์ ๋ ค +ํฌ์›Œ๋“œ +์™ธ๊ฐ€ +ํ•ดํ•™ +๊ฐœ๊ฐ„ +์—๋ด +์ข…๊ตญ +ํ™ฉ์„  +๋“œ๋ฆฌ๋ธ” +๋ฐ•์„๋ฏผ +์„œ๋ ค +##ason +์•ผ์‹œ์žฅ +๋ชฉ๋„ +์ด์ฐฝํ˜ธ +๋ช…์–ธ +์œ„๋ น +##๊ฐ๊ฐ +์ธ์žฅ +์ฒ ๊ธฐ +์กฐ๋ฆฌ๋ฒ• +์ „๊ตฐ +##๋กœ์›  +์€์ƒ +Bank +##๋ถ€์ ˆ +์‚ฌ๊ณต +์•”๋ชจ +๊ณ ์†ํ™” +๋ฐฉ๋ฐฐ +์–ด์ข… +์—ฐ๋งค +๊ณต๋ฐฉ์ „ +##๊ณ„์‚ฐ +์ค‘๋Œ€์žฅ +๊ด€ํ˜„์•…๋‹จ +์‹œ๋ณด +ํ†ต์† +TEST +๋ณด๋ฐฐ +์ฐํžˆ +cons +ํŠน์ข… +229 +239 +์†Œ๊ตญ +ํ’€๋ ค๋‚ฌ +๊ฐ„ํ–‰๋ฌผ +๋‹ฌ๋ผ์ด +๋ธŒ๋žŒ +์ตœ์ƒ๊ธ‰ +1868 +๊ทน์ง„ +ํœ˜๋‚ ๋ฆฌ +๊ณต๊ณต์—ฐํžˆ +760 +##์—ฐ์ˆ˜ +์ธ์‚ฌ์ด๋“œ +##ํ•˜์ด์˜ค +๊ตฌ์Šคํƒ€ +์ €๋‹น๊ถŒ +์ŠคํŒŸ +๋‹ค๋นˆ์น˜ +์™ผํŽธ +##OM +์˜ฌ๋ฆฌ๋ฒ„ +๋ ˆ์ธ์ง€ +##๋‚˜๋„ +##iver +ํ™”๊ธฐ์• ์•  +๋–จ์ณค +ํ•ตํƒ„๋‘ +##์‹œ์ค‘ +##์€๋•€ +๊ฐ์ •ํ‰๊ฐ€ +๋Œ€์ฒฉ +์„ธ๋‚œ +1592 +๋ฏธ์™„์„ฑ +๋™์ˆ˜ +์„ธ๋ ˆ +ํ˜„์ง„ +##OA +๋ผ๋Š”๋ฐ +์ •๋ถ€์•ˆ +๋Œ€๋Œ€์žฅ +๋ฒ ์ด๋น„๋ถ +life +๊ฑฐ๋ ค +์˜ˆ์กฐ +์œ ๋…„๊ธฐ +๋…ธ์•ฝ์ž +ํ˜ธ๋“ค๊ฐ‘ +##๋Œ€์›๊ตฐ +ํฌ๋ฆฌ์Šคํ‹ด +Great +์‹์€๋•€ +##๋ฐ”์‹œ +์—ฌ์ œ +ํ†ต๋‹ญ +๊ธˆ๋‚จ +##์ง€๊ฒ +๋งˆ๋„ค +๋ถ„๋ž€ +##by +ํ…Œ๋Ÿฌ๋ฆฌ์ฆ˜ +๋ณผ์…ฐ๋น„ํ‚ค +์ƒ๋… +ํŒŒ๋“œ +์ถœ์—ฐ์ž +์ด๊ฒจ๋‚ผ +๋งค์šดํƒ• +์นด๋ฉ”๋ฃฌ +๊ฑด์žฅ +์ด๊ฐ๋… +##ano +๋ฌด์•ˆํƒ€ +๋ฐฐ์„œ +๋ฐ”์ดํ‚น +490 +๋‚ด๋ฆฌ๋ง‰๊ธธ +##๋ฉ”๋ž„ +ํ—ˆ๊ฒ์ง€๊ฒ +๋‚จ๊ฐ• +์ฐจ๋Œ +๋ ˆ์ธ์ €์Šค +ํ•˜ํ† ์•ผ๋งˆ +๊นŒ๋ฅด +๋ฒŒ์ง‘ +์•„์ฐจ +๋„ˆ๋ผ +##์ ค๋กœ +์˜ค๋ž˜์˜ค๋ž˜ +์—ฌ์ง€์—†์ด +์ƒ์ˆ  +์ค‘ํ„ฑ +์›ƒ๊ธด +๊ทธ๋Ÿด์ˆ˜๋ก +๋ฆฌ์ฐจ๋“œ +๋ผ์‹œ +๋ผ๋น„ +์›จ์Šฌ๋ฆฌ +##์ƒˆ๋ผ +๋…ธ๋™๋ฒ• +##ํ•˜์ž„ +๋ชฉํ‘œ๋ฌผ +์˜ด๋‹ˆ +์ •ํ›ˆ +๋˜๋„๋ก +๊ฒฉํ•œ +์˜ฅ์ค‘ +ํ•˜์Šค +๋‚จ๋ฌธ +๋ณ‘๋ณ€ +SAT +๋Œ€์œ  +##๋Ÿฌ์šฐ +์™ธ์‹์—… +์žฌ๋ž˜์‹ +Bri +๋งŒ๋ฌด +์„ค์š• +##์ด๋‹ค +##๊ณ ์–‘์ด +์ž„๋Œ€์ธ +๋งคํšŒ +์˜คํ•˜์ด์˜ค 
+243 +๋จธ์Šคํฌ +์ค„์ธ๋‹ค +๋Š”๋‹ค๋ฉฐ +Big +ํ•œ๊ฐ€์œ„ +๋ณด์˜จ์„ฑ +๋ถ„์› +249 +ํ™”๋‚ด +๋Œ€๊ฐ€๋ฆฌ +##๋ฏผ์ •์Œ +์ด์„ฑ +๋ฏธ๋ค˜ +์• ์“ด +์—ญ๋ช… +์—์Šคํ”„๋ ˆ์†Œ +๊ตฐ๋ฌด +์›ฐ์Šค +##์ ˆ๋ถ€์ ˆ +242 +ํ—ˆ์ •๋ฌด +ํฝ๋‹ˆ๋‹ค +๋ฏธ๋™ +์‹œํ‹ฐ์ฆŒ +Ref +์•ˆ์ ˆ๋ถ€์ ˆ +์• ๋กœ +์™„๊ณ  +๋Œ์–ด๋‹น๊ธฐ +๋Œ๋ ค์„œ +๊น€ํƒœํฌ +๋ผ์šฐ +##์—”์ง€๋‹ˆ์–ด +๋ฏธํ‹ฐ +##์žฅ๊ณ  +๋†์‚ฌ์ผ +์šฐ์ƒํ˜ธ +์•„์ด์–ธ๋งจ +๋งˆ์นจํ‘œ +๋ฐœ๋ Œํƒ€์ธ +ํ•ด๋ฆฌ์Šค +##์—‰์ด +์กฐ๊น… +์—ฌํƒœ๊ป +๊ฐ€๋ง +ํ–‰์ž๋ถ€ +์•ŒํŽœ์‹œ์•„ +๋‘˜๋Ÿฌ์•‰ +๊ฐ•์ง€ +ํŒจํ‹ฐ +์žฅํ•™์‚ฌ +BL +๋‚™ํญ +๋ง๋ž‘ +๊ดด์ƒ +##๋ผ์ด๋” +๋‚จ๊ธด๋‹ค +์ƒŒ๋”์Šค +ํ‰๋ช… +์ดํฌ +์นด๋กค +##ability +์—ผํ™” +์ขŒ์ต์ˆ˜ +๊ฐ€๊ฐ +๊ฐ์ •๊ฐ€ +์–ต์–‘ +##ues +๋ณด์˜€์œผ๋‚˜ +##ha +ํŽ„๋Ÿญ +์ž„์‹ ๋ถ€ +์†Œ๊ฐœํŒ… +str +๋Š”๊ฑธ +๋น„์Šคํƒ€ +์—ฌ๋ฅ˜ +##๊ฒŒ์ด๋จธ +์†์‚ฌ +์ƒ๋‹ด์†Œ +๋ฐฉ์„ +์ง€๊ฒŒ +๋ฏธ๋„ค๋ฅด๋ฐ” +๋ณ€๋ณ„๋ ฅ +ํ”„๋กœ๊ฒŒ์ด๋จธ
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..ce114ab
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,30 @@
+[tool.black]
+line-length = 120
+target-version = ['py36', 'py37', 'py38']
+include = '\.py$'
+exclude = '''
+/(
+    \.git
+  | \.hg
+  | \.mypy_cache
+  | \.tox
+  | \.venv
+  | _build
+  | buck-out
+  | build
+  | dist
+  | env
+  | venv
+)/
+'''
+
+[tool.isort]
+multi_line_output = 3
+include_trailing_comma = true
+force_grid_wrap = 0
+use_parentheses = true
+ensure_newline_before_comments = true
+line_length = 120
+known_typing = ["typing", "types", "typing_extensions", "mypy", "mypy_extensions"]
+sections = ["FUTURE", "TYPING", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
+profile = "black"
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..e6de360
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,3 @@
+[flake8]
+extend-ignore = E203, W503, E501, E231, E402, E731, E266
+max-line-length = 120
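
Note on the lint configuration above: the isort `sections` list defines a custom TYPING group, filled from `known_typing`, which sorts between FUTURE and STDLIB imports. As a rough sketch of the ordering these settings produce (the import block below is hypothetical, not a file from this patch; `kobigbird` is assumed to be detected as first-party):

# Hypothetical Python file, grouped the way the [tool.isort] settings above sort it:
# FUTURE -> TYPING (known_typing) -> STDLIB -> THIRDPARTY -> FIRSTPARTY.
from __future__ import absolute_import    # FUTURE

from typing import List, Optional         # TYPING: "typing" is listed in known_typing

import json                               # STDLIB, alphabetized
import os

import torch                              # THIRDPARTY

from kobigbird import modeling            # FIRSTPARTY (package added by this patch)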
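
The bulk of this patch is the WordPiece vocabulary added at pretrain/tokenizer/vocab.txt; entries prefixed with `##` are subword continuations that attach to the preceding piece. A minimal sketch of loading it back, assuming the HuggingFace `transformers` package is installed and the working directory is the repository root (the sample sentence is illustrative only):

from transformers import BertTokenizer

# tokenizer_config.json and special_tokens_map.json sit next to vocab.txt,
# so the directory loads as a BERT-style WordPiece tokenizer.
tokenizer = BertTokenizer.from_pretrained("pretrain/tokenizer")

print(tokenizer.vocab_size)  # number of entries in vocab.txt

# Non-initial subword pieces come back with the "##" continuation prefix.
print(tokenizer.tokenize("ํ•œ๊ตญ์–ด ๋ชจ๋ธ์„ ๊ณต๊ฐœํ•ฉ๋‹ˆ๋‹ค"))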