From ec27891ce28b85991816f59125a925d5f87c8deb Mon Sep 17 00:00:00 2001
From: Kazem Jahanbakhsh
Date: Wed, 12 Aug 2020 15:43:08 -0700
Subject: [PATCH 1/7] run experiments with gpt-2

---
 README.md | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/README.md b/README.md
index 6d998ece1..731648b88 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,7 @@
+# GPT-2 experiments
+In this repo we run experiments with the GPT-2 language model to compare it with other language models such as BERT, GPT-3, and T5.
+
+
 **Status:** Archive (code is provided as-is, no updates expected)
 
 # gpt-2

From 0f0c209cc2a2e6634d13f4e25df60e78e23f745a Mon Sep 17 00:00:00 2001
From: Kazem Jahanbakhsh
Date: Wed, 12 Aug 2020 15:46:22 -0700
Subject: [PATCH 2/7] Created using Colaboratory

---
 experiments.ipynb | 41 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 41 insertions(+)
 create mode 100644 experiments.ipynb

diff --git a/experiments.ipynb b/experiments.ipynb
new file mode 100644
index 000000000..05de7774b
--- /dev/null
+++ b/experiments.ipynb
@@ -0,0 +1,41 @@
+{
+  "nbformat": 4,
+  "nbformat_minor": 0,
+  "metadata": {
+    "colab": {
+      "name": "experiments.ipynb",
+      "provenance": [],
+      "authorship_tag": "ABX9TyMMkhc0O5aOe9UDmu0n85xv",
+      "include_colab_link": true
+    },
+    "kernelspec": {
+      "name": "python3",
+      "display_name": "Python 3"
+    }
+  },
+  "cells": [
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "view-in-github",
+        "colab_type": "text"
+      },
+      "source": [
+        "\"Open"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "metadata": {
+        "id": "TXdOgLU16LTU",
+        "colab_type": "code",
+        "colab": {}
+      },
+      "source": [
+        ""
+      ],
+      "execution_count": null,
+      "outputs": []
+    }
+  ]
+}
\ No newline at end of file

From ab63b8fca8792bd313c2a6f561b4349d6204928f Mon Sep 17 00:00:00 2001
From: Kazem Jahanbakhsh
Date: Wed, 12 Aug 2020 15:57:08 -0700
Subject: [PATCH 3/7] Created using Colaboratory

---
 experiments.ipynb | 218 +++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 215 insertions(+), 3 deletions(-)

diff --git a/experiments.ipynb b/experiments.ipynb
index 05de7774b..428a03d01 100644
--- a/experiments.ipynb
+++ b/experiments.ipynb
@@ -5,7 +5,8 @@
     "colab": {
       "name": "experiments.ipynb",
       "provenance": [],
-      "authorship_tag": "ABX9TyMMkhc0O5aOe9UDmu0n85xv",
+      "collapsed_sections": [],
+      "authorship_tag": "ABX9TyNyqfR4ukeBdj/bl+OZSrgt",
       "include_colab_link": true
     },
     "kernelspec": {
@@ -24,15 +25,226 @@
         "\"Open"
       ]
     },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "0gIuWx4L3ADG",
+        "colab_type": "text"
+      },
+      "source": [
+        "In this Jupyter notebook you can play around with OpenAI's GPT-2 language model from the paper *Language Models are Unsupervised Multitask Learners*. You'll be able to choose between the small (117M parameters), medium (345M parameters), large (774M parameters), and XL (1.5B parameters) versions of GPT-2.\n",
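+        "\n",
+        "A rough sketch of the setup performed in the next cell (condensed here for readability; `MODEL_NAME` is only a hypothetical convenience variable, the cell below hard-codes the sizes it downloads):\n",
+        "\n",
+        "```\n",
+        "# Clone the gpt-2 fork used in these experiments and move into it\n",
+        "!git clone https://github.com/ilopezfr/gpt-2/\n",
+        "import os\n",
+        "os.chdir('gpt-2')\n",
+        "\n",
+        "# Fetch one of the released checkpoints: '124M', '345M', '774M' or '1558M'\n",
+        "MODEL_NAME = '124M'\n",
+        "!python download_model.py {MODEL_NAME}\n",
+        "\n",
+        "# The sampling scripts in this repo target TensorFlow 1.x\n",
+        "!pip3 install -r requirements.txt\n",
+        "!pip3 install tensorflow==1.13.1\n",
+        "```"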
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "Bpz5FNla3Bp3", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "ff208699-202b-4b51-e157-b4f49da5faf6" + }, + "source": [ + "!git clone https://github.com/ilopezfr/gpt-2/\n", + "import os\n", + "os.chdir('gpt-2')\n", + "# !python download_model.py 117M\n", + "!python download_model.py 124M\n", + "!python download_model.py 345M\n", + "# !python download_model.py 774M\n", + "# !python download_model.py 1558M\n", + "!pip3 install -r requirements.txt\n", + "!pip3 install tensorflow==1.13.1" + ], + "execution_count": 1, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Cloning into 'gpt-2'...\n", + "remote: Enumerating objects: 310, done.\u001b[K\n", + "remote: Total 310 (delta 0), reused 0 (delta 0), pack-reused 310\u001b[K\n", + "Receiving objects: 100% (310/310), 4.63 MiB | 5.34 MiB/s, done.\n", + "Resolving deltas: 100% (174/174), done.\n", + "Fetching checkpoint: 1.00kit [00:00, 917kit/s] \n", + "Fetching encoder.json: 1.04Mit [00:00, 44.1Mit/s] \n", + "Fetching hparams.json: 1.00kit [00:00, 1.06Mit/s] \n", + "Fetching model.ckpt.data-00000-of-00001: 498Mit [00:08, 57.3Mit/s] \n", + "Fetching model.ckpt.index: 6.00kit [00:00, 4.85Mit/s] \n", + "Fetching model.ckpt.meta: 472kit [00:00, 36.0Mit/s] \n", + "Fetching vocab.bpe: 457kit [00:00, 41.6Mit/s] \n", + "Fetching checkpoint: 1.00kit [00:00, 1.03Mit/s] \n", + "Fetching encoder.json: 1.04Mit [00:00, 37.2Mit/s] \n", + "Fetching hparams.json: 1.00kit [00:00, 920kit/s] \n", + "Fetching model.ckpt.data-00000-of-00001: 1.42Git [00:36, 38.9Mit/s] \n", + "Fetching model.ckpt.index: 11.0kit [00:00, 8.15Mit/s] \n", + "Fetching model.ckpt.meta: 927kit [00:00, 26.1Mit/s] \n", + "Fetching vocab.bpe: 457kit [00:00, 39.0Mit/s] \n", + "Collecting fire>=0.1.3\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/34/a7/0e22e70778aca01a52b9c899d9c145c6396d7b613719cd63db97ffa13f2f/fire-0.3.1.tar.gz (81kB)\n", + "\u001b[K |████████████████████████████████| 81kB 4.3MB/s \n", + "\u001b[?25hCollecting regex==2018.1.10\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/76/f4/7146c3812f96fcaaf2d06ff6862582302626a59011ccb6f2833bb38d80f7/regex-2018.01.10.tar.gz (612kB)\n", + "\u001b[K |████████████████████████████████| 614kB 14.2MB/s \n", + "\u001b[?25hCollecting requests==2.21.0\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/7d/e3/20f3d364d6c8e5d2353c72a67778eb189176f08e873c9900e10c0287b84b/requests-2.21.0-py2.py3-none-any.whl (57kB)\n", + "\u001b[K |████████████████████████████████| 61kB 5.9MB/s \n", + "\u001b[?25hCollecting tqdm==4.31.1\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/6c/4b/c38b5144cf167c4f52288517436ccafefe9dc01b8d1c190e18a6b154cd4a/tqdm-4.31.1-py2.py3-none-any.whl (48kB)\n", + "\u001b[K |████████████████████████████████| 51kB 5.4MB/s \n", + "\u001b[?25hRequirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from fire>=0.1.3->-r requirements.txt (line 1)) (1.15.0)\n", + "Requirement already satisfied: termcolor in /usr/local/lib/python3.6/dist-packages (from fire>=0.1.3->-r requirements.txt (line 1)) (1.1.0)\n", + "Requirement already satisfied: urllib3<1.25,>=1.21.1 in /usr/local/lib/python3.6/dist-packages (from requests==2.21.0->-r requirements.txt (line 3)) (1.24.3)\n", + "Requirement already satisfied: chardet<3.1.0,>=3.0.2 in /usr/local/lib/python3.6/dist-packages (from requests==2.21.0->-r 
requirements.txt (line 3)) (3.0.4)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.6/dist-packages (from requests==2.21.0->-r requirements.txt (line 3)) (2020.6.20)\n", + "Collecting idna<2.9,>=2.5\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/14/2c/cd551d81dbe15200be1cf41cd03869a46fe7226e7450af7a6545bfc474c9/idna-2.8-py2.py3-none-any.whl (58kB)\n", + "\u001b[K |████████████████████████████████| 61kB 5.3MB/s \n", + "\u001b[?25hBuilding wheels for collected packages: fire, regex\n", + " Building wheel for fire (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for fire: filename=fire-0.3.1-py2.py3-none-any.whl size=111005 sha256=8999f1fb382b963552e7459269f1776a225e6eea22a078d8fa96fb6f59cfe6c0\n", + " Stored in directory: /root/.cache/pip/wheels/c1/61/df/768b03527bf006b546dce284eb4249b185669e65afc5fbb2ac\n", + " Building wheel for regex (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for regex: filename=regex-2018.1.10-cp36-cp36m-linux_x86_64.whl size=547978 sha256=f8e982a9ad237c3ee0e5416168dc653c6ae8ea27f40432edfa36c063329b0e72\n", + " Stored in directory: /root/.cache/pip/wheels/74/17/3f/c77bba99efd74ba1a19862c9dd97f4b6d735e2826721dc00ff\n", + "Successfully built fire regex\n", + "\u001b[31mERROR: spacy 2.2.4 has requirement tqdm<5.0.0,>=4.38.0, but you'll have tqdm 4.31.1 which is incompatible.\u001b[0m\n", + "\u001b[31mERROR: google-colab 1.0.0 has requirement requests~=2.23.0, but you'll have requests 2.21.0 which is incompatible.\u001b[0m\n", + "\u001b[31mERROR: datascience 0.10.6 has requirement folium==0.2.1, but you'll have folium 0.8.3 which is incompatible.\u001b[0m\n", + "Installing collected packages: fire, regex, idna, requests, tqdm\n", + " Found existing installation: regex 2019.12.20\n", + " Uninstalling regex-2019.12.20:\n", + " Successfully uninstalled regex-2019.12.20\n", + " Found existing installation: idna 2.10\n", + " Uninstalling idna-2.10:\n", + " Successfully uninstalled idna-2.10\n", + " Found existing installation: requests 2.23.0\n", + " Uninstalling requests-2.23.0:\n", + " Successfully uninstalled requests-2.23.0\n", + " Found existing installation: tqdm 4.41.1\n", + " Uninstalling tqdm-4.41.1:\n", + " Successfully uninstalled tqdm-4.41.1\n", + "Successfully installed fire-0.3.1 idna-2.8 regex-2018.1.10 requests-2.21.0 tqdm-4.31.1\n", + "Collecting tensorflow==1.13.1\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/77/63/a9fa76de8dffe7455304c4ed635be4aa9c0bacef6e0633d87d5f54530c5c/tensorflow-1.13.1-cp36-cp36m-manylinux1_x86_64.whl (92.5MB)\n", + "\u001b[K |████████████████████████████████| 92.5MB 103kB/s \n", + "\u001b[?25hRequirement already satisfied: numpy>=1.13.3 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (1.18.5)\n", + "Requirement already satisfied: protobuf>=3.6.1 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (3.12.4)\n", + "Requirement already satisfied: grpcio>=1.8.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (1.31.0)\n", + "Requirement already satisfied: wheel>=0.26 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (0.34.2)\n", + "Requirement already satisfied: keras-preprocessing>=1.0.5 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (1.1.2)\n", + "Collecting keras-applications>=1.0.6\n", + "\u001b[?25l Downloading 
https://files.pythonhosted.org/packages/71/e3/19762fdfc62877ae9102edf6342d71b28fbfd9dea3d2f96a882ce099b03f/Keras_Applications-1.0.8-py3-none-any.whl (50kB)\n", + "\u001b[K |████████████████████████████████| 51kB 5.2MB/s \n", + "\u001b[?25hCollecting tensorflow-estimator<1.14.0rc0,>=1.13.0\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/bb/48/13f49fc3fa0fdf916aa1419013bb8f2ad09674c275b4046d5ee669a46873/tensorflow_estimator-1.13.0-py2.py3-none-any.whl (367kB)\n", + "\u001b[K |████████████████████████████████| 368kB 58.0MB/s \n", + "\u001b[?25hRequirement already satisfied: six>=1.10.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (1.15.0)\n", + "Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (1.1.0)\n", + "Collecting tensorboard<1.14.0,>=1.13.0\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/0f/39/bdd75b08a6fba41f098b6cb091b9e8c7a80e1b4d679a581a0ccd17b10373/tensorboard-1.13.1-py3-none-any.whl (3.2MB)\n", + "\u001b[K |████████████████████████████████| 3.2MB 57.7MB/s \n", + "\u001b[?25hRequirement already satisfied: absl-py>=0.1.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (0.9.0)\n", + "Requirement already satisfied: astor>=0.6.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (0.8.1)\n", + "Requirement already satisfied: gast>=0.2.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (0.3.3)\n", + "Requirement already satisfied: setuptools in /usr/local/lib/python3.6/dist-packages (from protobuf>=3.6.1->tensorflow==1.13.1) (49.2.0)\n", + "Requirement already satisfied: h5py in /usr/local/lib/python3.6/dist-packages (from keras-applications>=1.0.6->tensorflow==1.13.1) (2.10.0)\n", + "Collecting mock>=2.0.0\n", + " Downloading https://files.pythonhosted.org/packages/cd/74/d72daf8dff5b6566db857cfd088907bb0355f5dd2914c4b3ef065c790735/mock-4.0.2-py3-none-any.whl\n", + "Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.6/dist-packages (from tensorboard<1.14.0,>=1.13.0->tensorflow==1.13.1) (1.0.1)\n", + "Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.6/dist-packages (from tensorboard<1.14.0,>=1.13.0->tensorflow==1.13.1) (3.2.2)\n", + "Requirement already satisfied: importlib-metadata; python_version < \"3.8\" in /usr/local/lib/python3.6/dist-packages (from markdown>=2.6.8->tensorboard<1.14.0,>=1.13.0->tensorflow==1.13.1) (1.7.0)\n", + "Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.6/dist-packages (from importlib-metadata; python_version < \"3.8\"->markdown>=2.6.8->tensorboard<1.14.0,>=1.13.0->tensorflow==1.13.1) (3.1.0)\n", + "Installing collected packages: keras-applications, mock, tensorflow-estimator, tensorboard, tensorflow\n", + " Found existing installation: tensorflow-estimator 2.3.0\n", + " Uninstalling tensorflow-estimator-2.3.0:\n", + " Successfully uninstalled tensorflow-estimator-2.3.0\n", + " Found existing installation: tensorboard 2.3.0\n", + " Uninstalling tensorboard-2.3.0:\n", + " Successfully uninstalled tensorboard-2.3.0\n", + " Found existing installation: tensorflow 2.3.0\n", + " Uninstalling tensorflow-2.3.0:\n", + " Successfully uninstalled tensorflow-2.3.0\n", + "Successfully installed keras-applications-1.0.8 mock-4.0.2 tensorboard-1.13.1 tensorflow-1.13.1 tensorflow-estimator-1.13.0\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "5R-d2cdU3PKn", + "colab_type": 
"text" + }, + "source": [ + "**Unconditional sample generatio**\n", + "\n", + "To generate unconditional samples from the small model, we need to run: \n", + "\n", + "`!python3 src/generate_unconditional_samples.py`" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "zuoq4NWB3YG6", + "colab_type": "code", + "colab": {} + }, + "source": [ + "!python3 src/generate_unconditional_samples.py --model_name='124M' --nsamples=2 --top_k=40 --temperature=0.7 | tee samples" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "SPDoOaAl4T1Y", + "colab_type": "text" + }, + "source": [ + "**Conditional sample generation**\n", + "\n", + "To generate conditional samples from the small model:\n", + "\n", + "`!python3 src/interactive_conditional_samples.py`" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "wuhdgk8b4n38", + "colab_type": "text" + }, + "source": [ + "### Text Completion\n", + "\n", + "- Context: random unseen text\n", + "\n", + "Sample prompt 1: \n", + "```\n", + "In a shocking finding, scientist discovered a herd of unicorns living in a remote, previously unexplored valley, in the Andes Mountains. Even more surprising to the researchers was the fact that the unicorns spoke perfect English.\n", + "```\n", + "\n", + "Sample prompt 2: ([*Voight-Kampff test*](https://www.youtube.com/watch?v=Umc9ezAyJv0))\n", + "\n", + "```\n", + "You're in a desert, walking along in the sand, when all of a sudden you look down and see a tortoise, Leon. It's crawling toward you. You reach down, you flip the tortoise over on its back. The tortoise lays on its back, its belly baking in the hot sun, beating its legs trying to turn itself over, but it can’t, not without your help. But you’re not helping. Why is that? " + ] + }, { "cell_type": "code", "metadata": { - "id": "TXdOgLU16LTU", + "id": "bxhvxQlv4rl-", "colab_type": "code", "colab": {} }, "source": [ - "" + "!python3 src/interactive_conditional_samples.py --model_name='345M' --nsamples=2 --top_k=100 --temperature=1" ], "execution_count": null, "outputs": [] From a8c2166332d4a06aa196f2b986889c77db702cb2 Mon Sep 17 00:00:00 2001 From: Kazem Jahanbakhsh Date: Wed, 12 Aug 2020 16:57:22 -0700 Subject: [PATCH 4/7] Created using Colaboratory --- experiments.ipynb | 677 +++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 665 insertions(+), 12 deletions(-) diff --git a/experiments.ipynb b/experiments.ipynb index 428a03d01..b930c11ae 100644 --- a/experiments.ipynb +++ b/experiments.ipynb @@ -6,7 +6,7 @@ "name": "experiments.ipynb", "provenance": [], "collapsed_sections": [], - "authorship_tag": "ABX9TyNyqfR4ukeBdj/bl+OZSrgt", + "authorship_tag": "ABX9TyOXuz5tcoJrAzViCTOj8QYt", "include_colab_link": true }, "kernelspec": { @@ -184,7 +184,9 @@ "\n", "To generate unconditional samples from the small model, we need to run: \n", "\n", - "`!python3 src/generate_unconditional_samples.py`" + "`!python3 src/generate_unconditional_samples.py`\n", + "\n", + "See below for some text that has been generated by GPT-2 model." 
] }, { @@ -192,13 +194,120 @@ "metadata": { "id": "zuoq4NWB3YG6", "colab_type": "code", - "colab": {} + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "ac28f9c5-8595-437c-8eec-157d0a45aae7" }, "source": [ "!python3 src/generate_unconditional_samples.py --model_name='124M' --nsamples=2 --top_k=40 --temperature=0.7 | tee samples" ], - "execution_count": null, - "outputs": [] + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:526: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint8 = np.dtype([(\"qint8\", np.int8, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:527: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_quint8 = np.dtype([(\"quint8\", np.uint8, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:528: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint16 = np.dtype([(\"qint16\", np.int16, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:529: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_quint16 = np.dtype([(\"quint16\", np.uint16, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:530: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint32 = np.dtype([(\"qint32\", np.int32, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:535: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " np_resource = np.dtype([(\"resource\", np.ubyte, 1)])\n", + "2020-08-12 22:57:36.388236: I tensorflow/core/platform/cpu_feature_guard.cc:141] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2 FMA\n", + "2020-08-12 22:57:36.391567: I tensorflow/core/platform/profile_utils/cpu_utils.cc:94] CPU Frequency: 2249995000 Hz\n", + "2020-08-12 22:57:36.391794: I tensorflow/compiler/xla/service/service.cc:150] XLA service 0x1ed8680 executing computations on platform Host. 
Devices:\n", + "2020-08-12 22:57:36.391822: I tensorflow/compiler/xla/service/service.cc:158] StreamExecutor device (0): , \n", + "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Colocations handled automatically by placer.\n", + "WARNING:tensorflow:From /content/gpt-2/src/sample.py:64: to_float (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use tf.cast instead.\n", + "WARNING:tensorflow:From /content/gpt-2/src/sample.py:67: multinomial (from tensorflow.python.ops.random_ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use tf.random.categorical instead.\n", + "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/training/saver.py:1266: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use standard file APIs to check for files with this prefix.\n", + "======================================== SAMPLE 1 ========================================\n", + "We've seen some amazing things in the last few years, but it's been a while since we had a chance to take a look at a new video from a new game that hasn't been released yet. This is the sequel to the original game and is slated for release on March 9th, 2016 for PlayStation 4 and Xbox One.\n", + "\n", + "In the video, you play as two characters of the original video game. The first character is named Kiyotaka, a young, white male who is known as \"Kiyotaka\" by the fans. The second character is named Takayuki, a young black male named Kiyotaka who is also known as \"Kiyotaka\" by the fans.\n", + "\n", + "In the last video, you play as the main character of the original video game, Yuzuri Shigeto. His name is the same as the name of the game \"Nendoroid\" which you have seen in the past, but you can also use it as a name for your character in the video.\n", + "\n", + "The only new thing to note is that the new video was created by a team of fans from the original video game. This is why we created our own video for this game and did not use the same name for the game that we used in the original game.\n", + "\n", + "The new video features a new music video for the game that will be released in April 2016. Check out the video below.\n", + "\n", + "This game is scheduled for release on April 8th, 2016 for PlayStation 4 and Xbox One. It will be available on PlayStation Store as well as the official website for the game. As you can see, you can download the game right here for free.\n", + "\n", + "Source: Eurogamer<|endoftext|>The New York Times has a story on this one, and the story is pretty interesting. Here's the story on the first page of the story:\n", + "\n", + "The U.S. has a long history of fighting terrorism after World War II. It ended with the U.S. launching an amphibious assault in 1942. It was at that time not only the first amphibious attack in the war, but the first time U.S. forces captured or killed a terrorist. The amphibious assault was a major setback to the U.S. 
in World War II.\n", + "\n", + "This is the same story that has been repeating for many years, including this one from the Daily Mail, and this one from the New York Times:\n", + "\n", + "The U.S. military has not been willing to reveal that the U.S. military had used its bases in Afghanistan or Iraq for more than a decade to carry out an attack, the Pentagon said Tuesday, despite repeated requests from Congress.\n", + "\n", + "The U.S. military has not been willing to reveal that the U.S. military had used its bases in Afghanistan or Iraq for more than a decade to carry out an attack, the Pentagon said Tuesday, despite repeated requests from Congress.\n", + "\n", + "The Times story is really a classic example of the U.S. thinking it can control the world without having to worry about losing its moral compass.\n", + "\n", + "It does, however, make you wonder whether the U.S. is the only country in the world that believes that its people have been given the right to carry out atrocities.\n", + "\n", + "The U.S. is a country that believes that its people have been given the right to carry out atrocities.<|endoftext|>On Friday, a group of American business leaders and journalists were killed in a \"shooting at a restaurant in New York City\" that left at least five members of Congress dead.\n", + "\n", + "It's not clear who fired the fatal shots, but the report came as a surprise to many who knew the attack as a result of what they were told by the media.\n", + "\n", + "The attack occurred on the 32nd floor of the U.S. Capitol building in the East Wing, a building used by the National Security Council — the administration's top counterterrorism adviser in charge of the country's foreign policy.\n", + "\n", + "The attack was the fourth death in less than a year in the U.S. Capitol — the most recent being a man shot in a Washington, D.C., nightclub in September.\n", + "\n", + "On Friday, the president of the House of Representatives, Rep. Mike Rogers, R-Mich., took to Twitter to apologize for the attack, claiming that the attack was \"just a misunderstanding.\"\n", + "\n", + "The attack was the fourth in five months in the U.S. Capitol. It's the second in as many months, and the third in as many weeks. #Shooting at a restaurant in New York City. No one was hurt. — Mike Rogers (@RepMikeRogers) February 6, 2017\n", + "\n", + "A spokeswoman for the U.S. Capitol Police said Friday afternoon that the Capitol Police were investigating the incident.\n", + "\n", + "\"We're aware of the shooting that occurred at a restaurant in New York City,\" said spokeswoman Kelly Lovett. \"We know that this is an isolated incident that\n", + "======================================== SAMPLE 2 ========================================\n", + "Taken together, the two stories in this collection are the first and second of three stories, based on the book of Genesis. Both stories are based on God's promise that he would not abandon his original promise, that the people of Jerusalem would be as faithful as he had promised them.\n", + "\n", + "The second story, an account of a Jewish revolt against the Assyrians in the third century BC, is based on the story of an Assyrian priest who became the first chief rabbi of Jerusalem. The story takes place in the second century BC, when the Assyrians conquered Jerusalem. 
The story takes place in the second century AD, when the Assyrians invaded Jerusalem and established a state of peace between the two countries.\n", + "\n", + "The third story, an account of the conquest by the Assyrians in the first century BC, is based on the story of a young man named Isaac who was raised in a land of the Holy Land. The story takes place in the third century AD, when the Assyrians invaded Jerusalem and established a state of peace between the two countries.\n", + "\n", + "The fourth story, the account of the conquest by the Assyrians in the third century BC, takes place in the third century AD, when the Assyrians invaded Jerusalem and established a state of peace between the two countries. It is a story of a man named Isaac who led his people against the Assyrians. The story is based on the story of a man named Isaac who led his people against the Assyrians.\n", + "\n", + "The fifth story, the story of the conquest by the Assyrians in the seventh century BC, takes place in the seventh century AD, when the Assyrians conquered Jerusalem and established a state of peace between the two countries. It is based on the story of a man named Isaac who led his people against the Assyrians. The story is based on the story of a man named Isaac who led his people against the Assyrians.\n", + "\n", + "The sixth story, the story of the conquest by the Assyrians in the seventh century AD, takes place in the seventh century AD, when the Assyrians conquered Jerusalem and established a state of peace between the two countries. It is based on the story of a man named Isaac who led his people against the Assyrians. The story is based on the story of a man named Isaac who led his people against the Assyrians.\n", + "\n", + "The seventh story, the story of the conquest by the Assyrians in the seventh century AD, takes place in the seventh century AD, when the Assyrians conquered Jerusalem and established a state of peace between the two countries. It is based on the story of a man named Isaac who led his people against the Assyrians. The story is based on the story of a man named Isaac who led his people against the Assyrians.\n", + "\n", + "The eighth story, the story of the conquest by the Assyrians in the eighth century AD, takes place in the eighth century AD, when the Assyrians conquered Jerusalem and established a state of peace between the two countries. It is based on the story of a man named Isaac who led his people against the Assyrians. The story is based on the story of a man named Isaac who led his people against the Assyrians.\n", + "\n", + "The ninth story, the story of the conquest by the Assyrians in the ninth century AD, takes place in the ninth century AD, when the Assyrians conquered Jerusalem and established a state of peace between the two countries. It is based on the story of a man named Isaac who led his people against the Assyrians. The story is based on the story of a man named Isaac who led his people against the Assyrians.\n", + "\n", + "The tenth story, the story of the conquest by the Assyrians in the tenth century AD, takes place in the tenth century AD, when the Assyrians conquered Jerusalem and established a state of peace between the two countries. It is based on the story of a man named Isaac who led his people against the Assyrians. 
The story is based on the story of a man named Isaac who led his people against the Assyrians.\n", + "\n", + "The eleventh story, the story of the conquest by the Assyrians in the eleventh century AD, takes place in the eleventh century AD, when the Assyrians conquered Jerusalem and established a state of peace between the two countries. It is based on the story of a man named Isaac who led his people against the Assyrians. The story is based on the story of a man named Isaac who led his people against the Assyrians.\n", + "\n", + "The twelfth story, the story of the conquest by the Assyrians in the twelfth century AD, takes place in the twelfth century AD, when the Assyrians conquered Jerusalem and established a state of peace between the two countries. It is based on the story of a man named Isaac who led his people against the Assyrians. The story is based on the story of a man named Isaac who led his people against the Assyrians.\n", + "\n", + "The thirteenth story, the story of the conquest by the Assyrians in the thirteenth century AD\n" + ], + "name": "stdout" + } + ] }, { "cell_type": "markdown", @@ -225,15 +334,16 @@ "\n", "- Context: random unseen text\n", "\n", - "Sample prompt 1: \n", + "Sample prompt 1: ([*A fiction by Neil Gaiman*](https://www.reddit.com/r/slatestarcodex/comments/hmu5lm/fiction_by_neil_gaiman_and_terry_pratchett_by_gpt3/))\n", "```\n", - "In a shocking finding, scientist discovered a herd of unicorns living in a remote, previously unexplored valley, in the Andes Mountains. Even more surprising to the researchers was the fact that the unicorns spoke perfect English.\n", + "A short-short story is only a couple of paragraphs long. This award-winning short-short story is by Neil Gaiman: Chrysalis by Neil Gaiman\n", "```\n", "\n", - "Sample prompt 2: ([*Voight-Kampff test*](https://www.youtube.com/watch?v=Umc9ezAyJv0))\n", + "Sample prompt 2: ([*A fiction by Terry Pratchett*](https://www.reddit.com/r/slatestarcodex/comments/hmu5lm/fiction_by_neil_gaiman_and_terry_pratchett_by_gpt3/))\n", "\n", "```\n", - "You're in a desert, walking along in the sand, when all of a sudden you look down and see a tortoise, Leon. It's crawling toward you. You reach down, you flip the tortoise over on its back. The tortoise lays on its back, its belly baking in the hot sun, beating its legs trying to turn itself over, but it can’t, not without your help. But you’re not helping. Why is that? " + "A short-short story is only a few paragraphs long. This award winning short-short story is by Terry Pratchett, author of Wee Free Men. 
The Underland by Terry Pratchett\n", + "```" ] }, { @@ -241,13 +351,556 @@ "metadata": { "id": "bxhvxQlv4rl-", "colab_type": "code", - "colab": {} + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "fba95c12-ad97-44e1-e8e6-d8f20a13be0e" }, "source": [ "!python3 src/interactive_conditional_samples.py --model_name='345M' --nsamples=2 --top_k=100 --temperature=1" ], - "execution_count": null, - "outputs": [] + "execution_count": 3, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:526: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint8 = np.dtype([(\"qint8\", np.int8, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:527: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_quint8 = np.dtype([(\"quint8\", np.uint8, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:528: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint16 = np.dtype([(\"qint16\", np.int16, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:529: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_quint16 = np.dtype([(\"quint16\", np.uint16, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:530: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint32 = np.dtype([(\"qint32\", np.int32, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:535: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " np_resource = np.dtype([(\"resource\", np.ubyte, 1)])\n", + "2020-08-12 23:11:26.452330: I tensorflow/core/platform/cpu_feature_guard.cc:141] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2 FMA\n", + "2020-08-12 23:11:26.455843: I tensorflow/core/platform/profile_utils/cpu_utils.cc:94] CPU Frequency: 2249995000 Hz\n", + "2020-08-12 23:11:26.456117: I tensorflow/compiler/xla/service/service.cc:150] XLA service 0x2f8a520 executing computations on platform Host. 
Devices:\n", + "2020-08-12 23:11:26.456148: I tensorflow/compiler/xla/service/service.cc:158] StreamExecutor device (0): , \n", + "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Colocations handled automatically by placer.\n", + "WARNING:tensorflow:From /content/gpt-2/src/sample.py:64: to_float (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use tf.cast instead.\n", + "WARNING:tensorflow:From /content/gpt-2/src/sample.py:67: multinomial (from tensorflow.python.ops.random_ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use tf.random.categorical instead.\n", + "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/training/saver.py:1266: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use standard file APIs to check for files with this prefix.\n", + "Model prompt >>> A short-short story is only a couple of paragraphs long. This award-winning short-short story is by Neil Gaiman: Chrysalis by Neil Gaiman\n", + "======================================== SAMPLE 1 ========================================\n", + ", is about a child who struggles with his very strong views on sex and religion. Only one thing can protect him from this powerful world, and that is his religion. \"When I read the final copy of this book, and felt a compulsion to cry, \"Cry!\", I knew it was the best story I'd read in years.\" But of course, as children, we can see the world the way we want to see it. Can you imagine how your young child would feel about another kid trying to tell him, he knows! The best stories make kids realize things they didn't know, and we got to share them with a wide audience.\n", + "\n", + "THEY ARE SO GOOD TO ADULTHOOD\n", + "\n", + "\n", + "This award-winning short-short story is by David Foster Wallace\n", + "\n", + "It is a dream that I have thought for many years. It is a dream. It is a dream that I am sharing with the world.\n", + "\n", + "So you will like this one. We will be there to translate their full works.\n", + "\n", + "WE ARE PREPARED TO GIVE THE GROWEST WORDS\n", + "\n", + "Well…it's not a good thing if you take away the gift of imagination from kids in the first place, because…ah, if you know your children, nothing comes easily. No miracles and no miracles happen easy and quick, are always surprising.\n", + "\n", + "It can only be done by loving and caring. In fact, it doesn't even have to be as difficult or as scary if we care deeply for the person we are asking the child to consider. Make sure your life has something worth writing down for everybody to remember, and we can do it. We have stories written down already, and we will put them to good use today.\n", + "\n", + "We don't say \"I wrote you something today.\" We say it by way of a special surprise in every story we write. But that, of course, won't become reality unless we tell someone. Don't take that for granted, young reader.\n", + "\n", + "Read the short-short story. 
See the wonderful pictures and stories, hear the words spoken by those words, and all will be yours - but tell your kids too quickly instead of helping them.<|endoftext|>Video\n", + "\n", + "An Indian army helicopter shot down a Syrian government jet carrying humanitarian aid to the Syrian Kurdish city of Afrin overnight, the first time a plane carrying aid and munitions has crashed in Syria for more than a year.\n", + "\n", + "The helicopter damaged the back of the plane after the Syrian jet went into\n", + "======================================== SAMPLE 2 ========================================\n", + " The title is both short and long, and it covers a lot of ground to great effect! If you're tired of having to keep track of all that in one place, Chrysalis is going to give you every single clue you already have needed to know already. This is a fantastic short novel that will enthrall and amuse you. Highly recommended! Booklist rating: 3 out of 5 stars 2 stars\n", + "\n", + "When a goodly woman begins to suffer from a debilitating illness, how do she and her colleagues deal with the medical team's unwillingness to help? With a mix of comedy and tragedy, this tale contains unexpected twists, as does all the dialogue, even when written by authors you wouldn't expect. Booklist rating: 5 out of 5 stars 2 stars\n", + "\n", + "I couldn't agree more with Bookside which notes that this award-winning short story collection by John Grisham is, to date, her favorite book she's read. The theme and writing combine, with humour, plenty of action, wit, hearteningly touching character development, and even a slight dose of tragedy thrown in for good measure. I read this book with trepidation. Booklist rating: 4 out of 5 stars 2 stars\n", + "\n", + "From the fantastic story of Zoya, a young girl born with powers from the powers of the stars as she struggles to survive the harsh death of her grandfather. This short novel brings these powers to a brave young girl that struggles to take over the world and live in peace. Booklist rating: 6 out of 5 stars 2 stars\n", + "\n", + "A man with strange, black eyes comes across a small girl who he gets close to. The boy becomes her friend. He promises her good news and then holds out a hand. The girl must stay hidden from the man. They both fall in love. However, the father who's close to his daughters wants his daughter as badly as she wants her daughter. Booklist rating: 7 out of 5 stars 2 stars\n", + "\n", + "The story picks up from one of John Grisham's previous stories in The Girl Who Lived, wherein a character in a story by a journalist is possessed by a spirit that threatens to destroy the world the following morning. The character is unable to leave her, so a young fellow is needed to protect her by the name of \"The Peeper.\" The Peeper then guides the young writer away from the nightmare and into a fantastic world of magic and sorcery. Some say this is the best book in the world that Grisham ever wrote. Book\n", + "================================================================================\n", + "Model prompt >>> A short-short story is only a few paragraphs long. This award winning short-short story is by Terry Pratchett, author of Wee Free Men. The Underland by Terry Pratchett\n", + "======================================== SAMPLE 1 ========================================\n", + " is a collection of short stories as well as a bonus chapter that focuses on Pratchett's own travels through the Underland Realms. 
The Underland is a setting that has fascinated me as a child as a dream world with a lot of fantastic creatures and fantastic people. A great story about a magical world filled with wondrous creatures that has inspired me on through my work within the fantasy genre.\n", + "\n", + "A short tale is only a few paragraphs long. This award winning short story is by Terry Pratchett, author of Wee Free Men. The Underland by Terry Pratchett is a collection of short stories as well as a bonus chapter that focuses on Pratchett's own travels through the Underland Realms. The Underland is a setting that has inspired me as a child as a dream world with a lot of fantastic creatures and fantastic people. A great story about a magical world filled with wondrous creatures that has inspired me on through my work within the fantasy genre. The Shadow (a science fiction version of fantasy): This award winning short story by László Olló, features characters that come out of a portal and escape from a different fantasy worlds. His work was originally about teleportation when I was younger, but later he revisited this series for the award winning Shadow play at Stony Hill in 2012.\n", + "\n", + "This award winning short story by László Olló, features characters that come out of a portal and escape from a different fantasy worlds. His work was originally about teleportation when I was younger, but later he revisited this series for the award winning Shadow play at Stony Hill in 2012. The Ring (I love this one)\n", + "\n", + "This award winning short story by R.A. Salvatore is a story about two lovers, who travel together in a fantastical landscape. The story is by Jo Trattnall-Bowden, writer of The Stray. The Ring is a bit of a weird take on a genre that has often been pigeonholed, as something where the world is literally above your head, leaving you to do the deciding what to find and where to go. Not bad for an award winning story. How could it not?\n", + "\n", + "Art :\n", + "\n", + "I really love this category on this year's list as well. Art is super broad in this category. I feel like everyone here has over-analyzed art, but truth be told, I'm more of an artist then most people think. There are so many talented writers in this category though so\n", + "======================================== SAMPLE 2 ========================================\n", + " is collected in this anthology by Jason Krier.\n", + "\n", + "Stories from fantasy and science fiction will be published in many format formats. For example, all stories must be full-color, to be published digitally or the publishers may choose to print the stories for a print/online edition. Please contact editors in the Fantasy & Sci Fi section of the magazine if you would like any stories to be republished in one of these formats.\n", + "\n", + "In addition to providing short stories, the editor is usually prepared to read, talk or draw a short story. The fantasy section offers the editor opportunities to do research in new and classic fantasy technologies and the latest advances in science fiction. The science fiction section offers research opportunities and new and classic views on recent and contemporary science fiction that have taken place in the field. We can also discuss the newest research papers in sci-fi and futurism. 
For example, we may cover future stories available in the magazine that may have their own respective web sites, as well as any related fantasy art projects or writing resources created by talented players in F/SF as well as fans of the short story format.\n", + "\n", + "\n", + "If you would like to submit a short-short story, please have it not under 10,000 words. The winner of this title receives some of the best readers of the paper at Texas A&M in College Station, Texas.\n", + "\n", + "SOURCES\n", + "\n", + "http://fictionology-journal.blogspot.com/2014/04/science-fiction/\n", + "\n", + "http://a2.proxi.org/w2_content/www_content/1832/stories/991/book7d5a6b6-5c3b-4f52-8c6a-c18df93e35c2.html\n", + "\n", + "http://www.fanoftheyear.com/2012/11/short-short-my-most-careful-reader-of-future-fiction-awards/\n", + "\n", + "http://faversanctuary.com/2011/01/16/short-long-some-good-facts-about-awards-for-science-fiction/\n", + "\n", + "http://www.frankmueller.org/2011/02/05/how-many-papers-must-the-public-read-to-win-a-science-fantasy-short-short-short-story/><|endoftext|>You have requested the file:\n", + "\n", + "Name: Anya - A New\n", + "================================================================================\n", + "Model prompt >>> Traceback (most recent call last):\n", + " File \"/usr/lib/python3.6/contextlib.py\", line 99, in __exit__\n", + " self.gen.throw(type, value, traceback)\n", + " File \"/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py\", line 5253, in get_controller\n", + " yield g\n", + " File \"src/interactive_conditional_samples.py\", line 73, in interact_model\n", + " raw_text = input(\"Model prompt >>> \")\n", + "KeyboardInterrupt\n", + "\n", + "During handling of the above exception, another exception occurred:\n", + "\n", + "Traceback (most recent call last):\n", + " File \"src/interactive_conditional_samples.py\", line 91, in \n", + " fire.Fire(interact_model)\n", + " File \"/usr/local/lib/python3.6/dist-packages/fire/core.py\", line 138, in Fire\n", + " component_trace = _Fire(component, args, parsed_flag_args, context, name)\n", + " File \"/usr/local/lib/python3.6/dist-packages/fire/core.py\", line 468, in _Fire\n", + " target=component.__name__)\n", + " File \"/usr/local/lib/python3.6/dist-packages/fire/core.py\", line 672, in _CallAndUpdateTrace\n", + " component = fn(*varargs, **kwargs)\n", + " File \"src/interactive_conditional_samples.py\", line 88, in interact_model\n", + " print(\"=\" * 80)\n", + " File \"/usr/local/lib/python3.6/dist-packages/tensorflow/python/client/session.py\", line 1592, in __exit__\n", + " self.close()\n", + " File \"/usr/local/lib/python3.6/dist-packages/tensorflow/python/client/session.py\", line 728, in close\n", + " tf_session.TF_CloseSession(self._session)\n", + "KeyboardInterrupt\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "TLD63cep-_J8", + "colab_type": "text" + }, + "source": [ + "### 2. Question-Answering\n", + "\n", + "- Context: passage, some question/answer pairs, and token `A:`\n", + "- For a single word answer (i.e.: Yes/No, city), set flag `length=1`\n", + "\n", + "Sample prompt 1 ([*The Baseline test*](https://bladerunner.fandom.com/wiki/Baseline_Test))\n", + "```\n", + "ELIZA is an early natural language processing computer program created from 1964 to 1966 at the MIT Artificial Intelligence Laboratory by Joseph Weizenbaum. 
Created to demonstrate the superficiality of communication between humans and machines, Eliza simulated conversation by using a \"pattern matching\" and substitution methodology that gave users an illusion of understanding on the part of the program, but had no built in framework for contextualizing events. Directives on how to interact were provided by \"scripts\", written originally in MAD-Slip, which allowed ELIZA to process user inputs and engage in discourse following the rules and directions of the script. The most famous script, DOCTOR, simulated a Rogerian psychotherapist (in particular, Carl Rogers, who was well-known for simply parroting back at patients what they would just said), and used rules, dictated in the script, to respond with non-directional questions to user inputs. As such, ELIZA was one of the first chatterbots and one of the first programs capable of attempting the Turing test.\n", + "\n", + "Q: What programming language was ELIZA written in?\n", + "A: MAD-Slip\n", + "Q: Who invented ELIZA?\n", + "A: Joseph Weizenbaum\n", + "Q: What is ELIZA?\n", + "A: a natural language processing computer program\n", + "Q: Is ELIZA a human?\n", + "A: no\n", + "Q: Where was ELIZA created at?\n", + "A: MIT Artificial Intelligence Laboratory\n", + "Q: Did ELIZA pass Turing test?\n", + "A:\n", + "```\n", + "\n", + "Sample prompt 2: \n", + "```\n", + "Trump was born and raised in Queens, a borough of New York City, and received a bachelor's degree in economics from the Wharton School. He took charge of his family's real-estate business in 1971, renamed it The Trump Organization, and expanded its operations from Queens and Brooklyn into Manhattan. Trump later started various side ventures, mostly by licensing his name. He bought the Miss Universe brand of beauty pageants in 1996, and sold it in 2015. Trump and his businesses have been involved in more than 4,000 state and federal legal actions, including six bankruptcies. He produced and hosted The Apprentice, a reality television series, from 2003 to 2015. 
As of 2020, Forbes estimated his net worth to be $2.1 billion.\n", + "Q: Where was Trump born?\n", + "A: Queens\n", + "Q: What is Trump business?\n", + "A: real-estates\n", + "Q: How much is Trump wealth?\n", + "A: $2.1 billion\n", + "Q: What is Trump nationality?\n", + "A: American\n", + "Q: How many times Trump businesses have declared bankruptcies?\n", + "A: six\n", + "Q: What school did Trump go to?\n", + "A:\n", + "```\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "nTlg9ZX0B6Dc", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "f64e7c14-eda7-46e1-b650-7d4bc96d8eb1" + }, + "source": [ + "!python3 src/interactive_conditional_samples.py --model_name='345M' --nsamples=10 --top_k=40 --temperature=.80 --length=1" + ], + "execution_count": 4, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:526: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint8 = np.dtype([(\"qint8\", np.int8, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:527: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_quint8 = np.dtype([(\"quint8\", np.uint8, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:528: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint16 = np.dtype([(\"qint16\", np.int16, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:529: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_quint16 = np.dtype([(\"quint16\", np.uint16, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:530: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint32 = np.dtype([(\"qint32\", np.int32, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:535: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " np_resource = np.dtype([(\"resource\", np.ubyte, 1)])\n", + "2020-08-12 23:36:02.394899: I tensorflow/core/platform/cpu_feature_guard.cc:141] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2 FMA\n", + "2020-08-12 23:36:02.398341: I tensorflow/core/platform/profile_utils/cpu_utils.cc:94] CPU Frequency: 2249995000 Hz\n", + "2020-08-12 23:36:02.398621: I tensorflow/compiler/xla/service/service.cc:150] XLA service 0x140c520 executing computations on platform Host. 
Devices:\n", + "2020-08-12 23:36:02.398652: I tensorflow/compiler/xla/service/service.cc:158] StreamExecutor device (0): , \n", + "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Colocations handled automatically by placer.\n", + "WARNING:tensorflow:From /content/gpt-2/src/sample.py:64: to_float (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use tf.cast instead.\n", + "WARNING:tensorflow:From /content/gpt-2/src/sample.py:67: multinomial (from tensorflow.python.ops.random_ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use tf.random.categorical instead.\n", + "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/training/saver.py:1266: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use standard file APIs to check for files with this prefix.\n", + "Model prompt >>> ELIZA is an early natural language processing computer program created from 1964 to 1966 at the MIT Artificial Intelligence Laboratory by Joseph Weizenbaum. Created to demonstrate the superficiality of communication between humans and machines, Eliza simulated conversation by using a \"pattern matching\" and substitution methodology that gave users an illusion of understanding on the part of the program, but had no built in framework for contextualizing events. Directives on how to interact were provided by \"scripts\", written originally in MAD-Slip, which allowed ELIZA to process user inputs and engage in discourse following the rules and directions of the script. The most famous script, DOCTOR, simulated a Rogerian psychotherapist (in particular, Carl Rogers, who was well-known for simply parroting back at patients what they would just said), and used rules, dictated in the script, to respond with non-directional questions to user inputs. As such, ELIZA was one of the first chatterbots and one of the first programs capable of attempting the Turing test. Q: What programming language was ELIZA written in? A: MAD-Slip Q: Who invented ELIZA? A: Joseph Weizenbaum Q: What is ELIZA? A: a natural language processing computer program Q: Is ELIZA a human? A: no Q: Where was ELIZA created at? A: MIT Artificial Intelligence Laboratory Q: Did ELIZA pass Turing test? 
A:\n", + "======================================== SAMPLE 1 ========================================\n", + " EL\n", + "======================================== SAMPLE 2 ========================================\n", + " NO\n", + "======================================== SAMPLE 3 ========================================\n", + " yes\n", + "======================================== SAMPLE 4 ========================================\n", + " Yes\n", + "======================================== SAMPLE 5 ========================================\n", + " yes\n", + "======================================== SAMPLE 6 ========================================\n", + " No\n", + "======================================== SAMPLE 7 ========================================\n", + " it\n", + "======================================== SAMPLE 8 ========================================\n", + " Yes\n", + "======================================== SAMPLE 9 ========================================\n", + " No\n", + "======================================== SAMPLE 10 ========================================\n", + " The\n", + "================================================================================\n", + "Model prompt >>> Trump was born and raised in Queens, a borough of New York City, and received a bachelor's degree in economics from the Wharton School. He took charge of his family's real-estate business in 1971, renamed it The Trump Organization, and expanded its operations from Queens and Brooklyn into Manhattan. Trump later started various side ventures, mostly by licensing his name. He bought the Miss Universe brand of beauty pageants in 1996, and sold it in 2015. Trump and his businesses have been involved in more than 4,000 state and federal legal actions, including six bankruptcies. He produced and hosted The Apprentice, a reality television series, from 2003 to 2015. As of 2020, Forbes estimated his net worth to be $2.1 billion. Q: Where was Trump born? A: Queens Q: What is Trump business? A: real-estates Q: How much is Trump wealth? A: $2.1 billion Q: What is Trump nationality? A: American Q: How many times Trump businesses have declared bankruptcies? A: six Q: What school did Trump go to? 
A:\n", + "======================================== SAMPLE 1 ========================================\n", + " Wh\n", + "======================================== SAMPLE 2 ========================================\n", + " Wh\n", + "======================================== SAMPLE 3 ========================================\n", + " Wh\n", + "======================================== SAMPLE 4 ========================================\n", + " Wh\n", + "======================================== SAMPLE 5 ========================================\n", + " Wh\n", + "======================================== SAMPLE 6 ========================================\n", + " Wh\n", + "======================================== SAMPLE 7 ========================================\n", + " Wh\n", + "======================================== SAMPLE 8 ========================================\n", + " Wh\n", + "======================================== SAMPLE 9 ========================================\n", + " Wh\n", + "======================================== SAMPLE 10 ========================================\n", + " Wh\n", + "================================================================================\n", + "Model prompt >>> Traceback (most recent call last):\n", + " File \"/usr/lib/python3.6/contextlib.py\", line 99, in __exit__\n", + " self.gen.throw(type, value, traceback)\n", + " File \"/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py\", line 5253, in get_controller\n", + " yield g\n", + " File \"src/interactive_conditional_samples.py\", line 73, in interact_model\n", + " raw_text = input(\"Model prompt >>> \")\n", + "KeyboardInterrupt\n", + "\n", + "During handling of the above exception, another exception occurred:\n", + "\n", + "Traceback (most recent call last):\n", + " File \"src/interactive_conditional_samples.py\", line 91, in \n", + " fire.Fire(interact_model)\n", + " File \"/usr/local/lib/python3.6/dist-packages/fire/core.py\", line 138, in Fire\n", + " component_trace = _Fire(component, args, parsed_flag_args, context, name)\n", + " File \"/usr/local/lib/python3.6/dist-packages/fire/core.py\", line 468, in _Fire\n", + " target=component.__name__)\n", + " File \"/usr/local/lib/python3.6/dist-packages/fire/core.py\", line 672, in _CallAndUpdateTrace\n", + " component = fn(*varargs, **kwargs)\n", + " File \"src/interactive_conditional_samples.py\", line 88, in interact_model\n", + " print(\"=\" * 80)\n", + " File \"/usr/local/lib/python3.6/dist-packages/tensorflow/python/client/session.py\", line 1592, in __exit__\n", + " self.close()\n", + " File \"/usr/local/lib/python3.6/dist-packages/tensorflow/python/client/session.py\", line 728, in close\n", + " tf_session.TF_CloseSession(self._session)\n", + "KeyboardInterrupt\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "96K70feaD8-f", + "colab_type": "text" + }, + "source": [ + "### Summarization\n", + "\n", + "\n", + "\n", + "- Context: article and text *`TL;DR:`* or *`Summary:`* at the end.\n", + "\n", + "Sample prompt:\n", + "\n", + "```\n", + "NLP and other technologies are progressing quickly such that in the relatively near future it will be hard for humans to tell if they are talking with other humans or bots online. 
This is problematic as individuals and organizations, including state and non-state actors, can use, both maliciously or just for convenience, such bots to pose as humans, including themselves, on message boards, dating sites, micro-broadcasting sites (e.g., Twitter and Instagram), and even between individual and group private text threads. This is problematic as it undermines the trust individuals have in such forums and increases public disdain for technologies that may otherwise serve humanity. The issue becomes how can we produce a kind of “proof of humaness” such that a party may rely on the humanness of the counterparty. It seems one such adaptive approach may be for humans to change language faster than bots can mimic it--a kind of arms race between language use of humans and bots.\n", + "The assumption is that much of the state of the art in the near term will be trained on historical language data that is dated such that humans have a sufficient window of time to use “modern slang” consisting purposely misspelled words to connote emphasis (e,.g., niiiice), emojis, jifs, and other relatively new forms of language/expression, but in a much more frequent and ambiguous way so that humans can signal to other humans they are real. While use of such language/expression is already occurring as a socio-cultural phenomena, to date it has not been used as a way of proving humanness.\n", + "An alternative or in conjunction with the above such modern slang may be used to train classifiers to help detect whether a bot is being used.\n", + "TL;DR: \n", + "```" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "yECJYLDjB8Su", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "00db5c29-afbf-4474-bd43-1a9bab46e98b" + }, + "source": [ + "!python3 src/interactive_conditional_samples.py --model_name='345M' --nsamples=3 --length=100 --temperature=1 " + ], + "execution_count": 5, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:526: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint8 = np.dtype([(\"qint8\", np.int8, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:527: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_quint8 = np.dtype([(\"quint8\", np.uint8, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:528: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint16 = np.dtype([(\"qint16\", np.int16, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:529: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_quint16 = np.dtype([(\"quint16\", np.uint16, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:530: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint32 = 
np.dtype([(\"qint32\", np.int32, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:535: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " np_resource = np.dtype([(\"resource\", np.ubyte, 1)])\n", + "2020-08-12 23:46:41.183948: I tensorflow/core/platform/cpu_feature_guard.cc:141] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2 FMA\n", + "2020-08-12 23:46:41.187303: I tensorflow/core/platform/profile_utils/cpu_utils.cc:94] CPU Frequency: 2249995000 Hz\n", + "2020-08-12 23:46:41.187567: I tensorflow/compiler/xla/service/service.cc:150] XLA service 0x2622520 executing computations on platform Host. Devices:\n", + "2020-08-12 23:46:41.187596: I tensorflow/compiler/xla/service/service.cc:158] StreamExecutor device (0): , \n", + "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Colocations handled automatically by placer.\n", + "WARNING:tensorflow:From /content/gpt-2/src/sample.py:64: to_float (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use tf.cast instead.\n", + "WARNING:tensorflow:From /content/gpt-2/src/sample.py:67: multinomial (from tensorflow.python.ops.random_ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use tf.random.categorical instead.\n", + "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/training/saver.py:1266: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use standard file APIs to check for files with this prefix.\n", + "Model prompt >>> NLP and other technologies are progressing quickly such that in the relatively near future it will be hard for humans to tell if they are talking with other humans or bots online. This is problematic as individuals and organizations, including state and non-state actors, can use, both maliciously or just for convenience, such bots to pose as humans, including themselves, on message boards, dating sites, micro-broadcasting sites (e.g., Twitter and Instagram), and even between individual and group private text threads. This is problematic as it undermines the trust individuals have in such forums and increases public disdain for technologies that may otherwise serve humanity. The issue becomes how can we produce a kind of “proof of humaness” such that a party may rely on the humanness of the counterparty. It seems one such adaptive approach may be for humans to change language faster than bots can mimic it--a kind of arms race between language use of humans and bots. The assumption is that much of the state of the art in the near term will be trained on historical language data that is dated such that humans have a sufficient window of time to use “modern slang” consisting purposely misspelled words to connote emphasis (e,.g., niiiice), emojis, jifs, and other relatively new forms of language/expression, but in a much more frequent and ambiguous way so that humans can signal to other humans they are real. 
While use of such language/expression is already occurring as a socio-cultural phenomena, to date it has not been used as a way of proving humanness. An alternative or in conjunction with the above such modern slang may be used to train classifiers to help detect whether a bot is being used. TL;DR: \n", + "======================================== SAMPLE 1 ========================================\n", + " Emoji that speak loud and vaguely, or that simply carry different syntax for different words, Uglify.js defines features of facial morphological features (specified by face/namechains). Existing soft-coded examples are like wearing a gas mask over one's teeth so others can ignore you. Combine \"let me know if anything bothers you\" or \"hey, what's this?!I have santa all over my face!\" Words that embody that embedded variable in a form can be\n", + "======================================== SAMPLE 2 ========================================\n", + " A huge push in automated speech recognition developers has uncovered significant flaws and avenues that amplify possible human exploitation.[9] ▪ I am looking for NSA style, intelligent, modular speech based corpus communication system. It could be hundreds of entries of data and belong intellectual® corpus high-speed Internet scenar- ing and/or much more because documents are already exported [no backrubbing]] :) Just a vague vague task description of why you should use this metric around MOSS code   attacking web\n", + "======================================== SAMPLE 3 ========================================\n", + " Three proposals involve referring coded language signatures to identify bots in meme films that are bots vs. bots, which if performed enthusiastically would make them 80% of bots vs. 30% of bots.<|endoftext|>Sporting Kansas City lost 2-1 to Real Salt Lake on Wednesday night at Sporting Park, but those same losses have sparked a rematch between the teams. The only issue? 
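The `TL;DR:`/`Summary:` trick above is plain string concatenation: the article text is followed by a summary cue and the model is simply asked to continue. As a minimal sketch of how such a prompt could be assembled before pasting it at the `Model prompt >>>` input (the `build_summary_prompt` helper below is hypothetical, not part of this repo):

```python
# Hypothetical helper: build a GPT-2 summarization prompt by appending a
# summary cue ("TL;DR:" or "Summary:") to the article, as described above.
def build_summary_prompt(article: str, cue: str = "TL;DR:") -> str:
    # GPT-2's context window is 1024 tokens, so very long articles would need
    # to be truncated before the cue is appended (not handled here).
    return article.strip() + "\n" + cue + " "


if __name__ == "__main__":
    article = (
        "NLP and other technologies are progressing quickly such that in the "
        "relatively near future it will be hard for humans to tell if they are "
        "talking with other humans or bots online."
    )
    # The printed string is what gets pasted at the "Model prompt >>>" input.
    print(build_summary_prompt(article))
```

Whatever the model generates after the cue is then read as the summary; as the samples above show, the 345M model tends to drift off topic quickly, which is one reason `--length=100` is kept fairly short for this run.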
Sporting Club will miss four players: striker Marvin Chavez, midfielders Carlos Alvarez, Sebastien Le Toux, and Rodrigo Lopez on\n", + "================================================================================\n", + "Model prompt >>> Traceback (most recent call last):\n", + " File \"/usr/lib/python3.6/contextlib.py\", line 99, in __exit__\n", + " self.gen.throw(type, value, traceback)\n", + " File \"/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py\", line 5253, in get_controller\n", + " yield g\n", + " File \"src/interactive_conditional_samples.py\", line 73, in interact_model\n", + " raw_text = input(\"Model prompt >>> \")\n", + "KeyboardInterrupt\n", + "\n", + "During handling of the above exception, another exception occurred:\n", + "\n", + "Traceback (most recent call last):\n", + " File \"src/interactive_conditional_samples.py\", line 91, in \n", + " fire.Fire(interact_model)\n", + " File \"/usr/local/lib/python3.6/dist-packages/fire/core.py\", line 138, in Fire\n", + " component_trace = _Fire(component, args, parsed_flag_args, context, name)\n", + " File \"/usr/local/lib/python3.6/dist-packages/fire/core.py\", line 468, in _Fire\n", + " target=component.__name__)\n", + " File \"/usr/local/lib/python3.6/dist-packages/fire/core.py\", line 672, in _CallAndUpdateTrace\n", + " component = fn(*varargs, **kwargs)\n", + " File \"src/interactive_conditional_samples.py\", line 88, in interact_model\n", + " print(\"=\" * 80)\n", + " File \"/usr/local/lib/python3.6/dist-packages/tensorflow/python/client/session.py\", line 1592, in __exit__\n", + " self.close()\n", + " File \"/usr/local/lib/python3.6/dist-packages/tensorflow/python/client/session.py\", line 728, in close\n", + " tf_session.TF_CloseSession(self._session)\n", + "KeyboardInterrupt\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ufPK0D6bFRPq", + "colab_type": "text" + }, + "source": [ + "### Translation\n", + "\n", + "\n", + "\n", + "- Context: a few example pairs of the format *`english_sentence = spanish_sentence`*, and then *`english_sentence =`* at the end. \n", + "\n", + "Sample prompt:\n", + "```\n", + "Good morning. = Buenos días.\n", + "I am lost. Where is the restroom? = Estoy perdido. ¿Dónde está el baño?\n", + "How much does it cost? = ¿Cuánto cuesta?\n", + "How do you say maybe in Spanish? = ¿Cómo se dice maybe en Español?\n", + "Would you speak slower, please. = Por favor, habla mas despacio.\n", + "Where is the book store? = ¿Dónde está la librería?\n", + "At last a feminist comedian who makes jokes about men. = Por fin un cómico feminista que hace chistes sobre hombres.\n", + "\n", + "How old are you? 
= \n", + "\n", + "\n", + "```\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "iwOqMTdGFYTg", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "63874572-0602-4651-91e9-cf45c9d31fe7" + }, + "source": [ + "!python3 src/interactive_conditional_samples.py --model_name='345M' --nsamples=3 --temperature=1" + ], + "execution_count": 6, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:526: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint8 = np.dtype([(\"qint8\", np.int8, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:527: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_quint8 = np.dtype([(\"quint8\", np.uint8, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:528: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint16 = np.dtype([(\"qint16\", np.int16, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:529: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_quint16 = np.dtype([(\"quint16\", np.uint16, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:530: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint32 = np.dtype([(\"qint32\", np.int32, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:535: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " np_resource = np.dtype([(\"resource\", np.ubyte, 1)])\n", + "2020-08-12 23:51:19.200716: I tensorflow/core/platform/cpu_feature_guard.cc:141] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2 FMA\n", + "2020-08-12 23:51:19.204022: I tensorflow/core/platform/profile_utils/cpu_utils.cc:94] CPU Frequency: 2249995000 Hz\n", + "2020-08-12 23:51:19.204249: I tensorflow/compiler/xla/service/service.cc:150] XLA service 0x1b5c520 executing computations on platform Host. 
Devices:\n", + "2020-08-12 23:51:19.204280: I tensorflow/compiler/xla/service/service.cc:158] StreamExecutor device (0): , \n", + "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Colocations handled automatically by placer.\n", + "WARNING:tensorflow:From /content/gpt-2/src/sample.py:64: to_float (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use tf.cast instead.\n", + "WARNING:tensorflow:From /content/gpt-2/src/sample.py:67: multinomial (from tensorflow.python.ops.random_ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use tf.random.categorical instead.\n", + "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/training/saver.py:1266: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use standard file APIs to check for files with this prefix.\n", + "Model prompt >>> Good morning. = Buenos días. I am lost. Where is the restroom? = Estoy perdido. ¿Dónde está el baño? How much does it cost? = ¿Cuánto cuesta? How do you say maybe in Spanish? = ¿Cómo se dice maybe en Español? Would you speak slower, please. = Por favor, habla mas despacio. Where is the book store? = ¿Dónde está la librería? At last a feminist comedian who makes jokes about men. = Por fin un cómico feminista que hace chistes sobre hombres. How old are you? = \n", + "======================================== SAMPLE 1 ========================================\n", + "Â último en Español para simples. = What day is it? = Quoth Sally. = I'll be right back. = Por favor, nuestro mañana. Where do you come from? = Este su option de nuevo: The Gatehouse Yack Time. = ¿Tu es revolución por algunas vestidas? Are you coming to listen to some bands? = ¿El tema unso passe en las verses? How about Chicago? = ¿Oscuro a sacramento la gusta? You go to church on Sunday? = Spanish woman: Â último asegurar. She's closed due to winter weather. = Spain llegó (re)speak, eh? = You are totally one to speak Spanish? = En consugiendo en español, sinputeró fueron. What language do you speak? = Storee es romantico, los españos kúntigos en 62/. Hacighto estado, quisiera modifier. Which two restaurant? = Sites al profesional de la cuisine publique. Wipeout? = Gana, necesita! WHO THE FUCK IS THIS BOY PEOPLE WERE TRALKING TO??? = Ce gaucho la adelante conviente realizada pensé que sus anarquímles informarse más había colegradas? Nada estan es benca. From where do you derive your income? = Día estas preste un muy meansia. What got your biscuit? = Guanía Parana que dudía menos amas aquí. Does a cat seem happy? = Fa une carne considerada el católogo de igualdad, me storeo armelo de drops. Xxxxxxxxxxxx I'm hot and tired. Come on folks! = Wiener künder strassen, über? You have so many things to do! = Ein deutsche alle Bochs sommeilber? So where are you from? = Â último está tryón os despias dignitas? Where has your health insurance been? = Lad al kumpen ens atras gras? Do you have health insurance? 
= Este daily ismas inazas realizados por este acoso?\n", + "\n", + "=\n", + "======================================== SAMPLE 2 ========================================\n", + " Sewa siguiente están la mentidad y el mundo. Do you drink? = es China 140entre TIRE, clic se poisía pesar los seccionados que no matter how \"hard\" you try, \"You're only going to get back on your foot\". 😉 Still assassins of the San Andreas Highway? = la habe Hurt Kids 😁 If you wanna smash the corrals of Flight 546, clear the 1,168 feet of cliffs. = ta veo chartido. Careful what you wish for. = las cuidad, traídos que no papa. What do I do you want to say? = longo tragar inquirás quienes no nos domo? All right! Thank you. Do you see there is money? = esto está comentários de muchas capablees. \" However messing everything will blow your petty money together. ITEM^^⃣ \" I want five dollars, don't you? = \" ma vira motivo tu tus volo\" Do you know where the empty fridge recycling center is? = Presto home. December 27 2015 THE QBASIENTA INVAPPORTIA BY 0 ONES Off the Grid Activist Militants of urgent need By Bob with bacon,  HT The Obama guards are not from the previous administration … You say they have those white youth jabs, on every net fear of CCTV cameras, all cameras LOL Alert viewers are carefully monitoring these documents ASAP I don't know if they won't be uploaded to their Facebook <------------ k-schwa https://twitter.com/philippe_in_sf/status/188822475889354965 packed, 2 FeedChimp readers (racial baars) in October. Customers never faced terstronger neo-Nazis� 0905aziffedourminer disabled me. i'm sorry(Q) (readssh) 😦 To demonstrate production in anniemere, artists, BTNCD Kirayuku village only had 86 BTC as Paed Epwale shows to higher risk age with his artists bedroom at last Have you ever been silently transformed into a cold stone or monolith? When Empires opened in Somerset County initially, and in the late 1800s lets say there was only $1,400 it needed to survive. Today our GDP was $100 billion in several years, after Zeno's daybook, it would give you $300 how usury\n", + "======================================== SAMPLE 3 ========================================\n", + " XX . = ____ 31 - ^ Hewitt's Job Superhero MENOWEs Craziest things have been said about India for twenty years thanks to India being so damn white. When Hasan is called lazy for going off for 90 minutes straight he should probably say, coy can allyro- American comfort es que me radios existientes para tejero frêcien los thus?uestros sarían a este uso en remains of indi...unció 97 can going to watch?calmit puzi ? olló themista. my lend-ership also face emptiness to nowhere; so when went is fencing about cows her dogs may need…\n", + "To Farmer Tina's skull it has completed to become crome infusion!! ycune moriogéré donde ne como cibo unsuccessfully miss hícy. cio pastor luche weaksellit sólo es sint colectivos buscar, hecho rescindcadia dino nos ratínas aquilinearia. Nada ne era da que cue gemone. Rocericulas maduro do menos carne de los prescientísticas hatos: alcisfillos los stewardess , mí. - as cargayotas\n", + "To Boy Peter (Happy Tuesday) the thought crosses my thoughts because I am reiting fear and not wanting to be either of those things right now. :-( I love me too much it is me or Mr. Pomminciara. Cí es o en estava son hors- actuado de se respano advrupal millones- cuando no train corron moshou y poner posiblyque se predicton paster mal na adolescents mas o only roye arrives amaya tendra. 
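The translation prompt works the same way, but few-shot: several `english_sentence = spanish_sentence` pairs are concatenated and the final line ends with a bare `english_sentence = `, cueing the model to continue in Spanish. A rough sketch (the `build_translation_prompt` helper is hypothetical, not part of this repo):

```python
# Hypothetical helper: build a few-shot English -> Spanish prompt in the
# "english_sentence = spanish_sentence" format used above.
def build_translation_prompt(pairs, query: str) -> str:
    lines = [f"{en} = {es}" for en, es in pairs]
    # Ending with "<query> = " cues the model to emit the Spanish translation.
    lines.append(f"{query} = ")
    return "\n".join(lines)


if __name__ == "__main__":
    examples = [
        ("Good morning.", "Buenos días."),
        ("How much does it cost?", "¿Cuánto cuesta?"),
        ("Where is the book store?", "¿Dónde está la librería?"),
    ]
    print(build_translation_prompt(examples, "How old are you?"))
```

As the samples show, 345M mostly produces Spanish-flavoured noise for this prompt; the GPT-2 paper itself reports only modest translation quality even for the largest model.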
acostá ayi en chué a quitos solidar que interes, hoy humangecradán nada muchos empleos. = Salud son friera de dan que sobre yo placer de cada pequeña. Boy Petersoh bien que por lo físicos cancellántes con un peopleos fundamentos hoy annemos a la portal arqueños ) Or not -'ll you not change It in experience of looking I find on Movie Tickets\", or some his nephew's on a Death Row (historia tomarita pour killik de city en rumora original es desigenitividades estrammenas nont\") through\n", + "================================================================================\n", + "Model prompt >>> Traceback (most recent call last):\n", + " File \"/usr/lib/python3.6/contextlib.py\", line 99, in __exit__\n", + " self.gen.throw(type, value, traceback)\n", + " File \"/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py\", line 5253, in get_controller\n", + " yield g\n", + " File \"src/interactive_conditional_samples.py\", line 73, in interact_model\n", + " raw_text = input(\"Model prompt >>> \")\n", + "KeyboardInterrupt\n", + "\n", + "During handling of the above exception, another exception occurred:\n", + "\n", + "Traceback (most recent call last):\n", + " File \"src/interactive_conditional_samples.py\", line 91, in \n", + " fire.Fire(interact_model)\n", + " File \"/usr/local/lib/python3.6/dist-packages/fire/core.py\", line 138, in Fire\n", + " component_trace = _Fire(component, args, parsed_flag_args, context, name)\n", + " File \"/usr/local/lib/python3.6/dist-packages/fire/core.py\", line 468, in _Fire\n", + " target=component.__name__)\n", + " File \"/usr/local/lib/python3.6/dist-packages/fire/core.py\", line 672, in _CallAndUpdateTrace\n", + " component = fn(*varargs, **kwargs)\n", + " File \"src/interactive_conditional_samples.py\", line 88, in interact_model\n", + " print(\"=\" * 80)\n", + " File \"/usr/local/lib/python3.6/dist-packages/tensorflow/python/client/session.py\", line 1592, in __exit__\n", + " self.close()\n", + " File \"/usr/local/lib/python3.6/dist-packages/tensorflow/python/client/session.py\", line 728, in close\n", + " tf_session.TF_CloseSession(self._session)\n", + "KeyboardInterrupt\n" + ], + "name": "stdout" + } + ] } ] } \ No newline at end of file From 380a42d3e540aec0a05bdae1500369106076ab0d Mon Sep 17 00:00:00 2001 From: Kazem Jahanbakhsh Date: Wed, 12 Aug 2020 17:00:00 -0700 Subject: [PATCH 5/7] Created using Colaboratory --- experiments.ipynb | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/experiments.ipynb b/experiments.ipynb index b930c11ae..47b6061e1 100644 --- a/experiments.ipynb +++ b/experiments.ipynb @@ -6,7 +6,7 @@ "name": "experiments.ipynb", "provenance": [], "collapsed_sections": [], - "authorship_tag": "ABX9TyOXuz5tcoJrAzViCTOj8QYt", + "authorship_tag": "ABX9TyOctNH+MKPbw7kjn3XjufvU", "include_colab_link": true }, "kernelspec": { @@ -32,7 +32,12 @@ "colab_type": "text" }, "source": [ - "In this Jupyter notebook you can play around with of Open AI's GPT-2 Language Model from the paper Language Models are Unsupervised Multitask Learners. You'll be able to choose between the small (117M parameters) , medium (345M parameters), large (774M parameters) and XL versions (1.5B parameters) version of GPT-2." + "# GPT-2 Playground\n", + "\n", + "## Background\n", + "In this Jupyter notebook we experiment with **Open AI's GPT-2** Language Model from the paper **[Language Models are Unsupervised Multitask Learners](https://d4mucfpksywv.cloudfront.net/better-language-models/language-models.pdf)**. 
We'll be able to choose between the small (**117M** parameters) , medium (**345M** parameters), large (**774M** parameters) and XL versions (**1.5B** parameters) version of GPT-2. \n", + "\n", + "According to the authors, the GPT-2 algorithm was trained on the task of *language modeling*--- which tests a program's ability to predict the next word in a given sentence--by ingesting huge numbers of articles, blogs, and websites. By using just this data it achieved state-of-the-art scores on a number of unseen language tests, an achievement known as *zero-shot learning.* It can also perform other writing-related tasks, like translating text from one language to another, summarizing long articles, and answering trivia questions.\n" ] }, { From d4bb2f4adf2b0419c66732f9c88832f7b4db1752 Mon Sep 17 00:00:00 2001 From: Kazem Jahanbakhsh Date: Mon, 7 Sep 2020 23:44:23 -0700 Subject: [PATCH 6/7] update README --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index 731648b88..eb1393677 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,9 @@ # GPT-2 experiments In this repo we are running experiment with GPT-2 language modle to compare it with other language model such as BERT, GPT-3, T5 etc. +## Text Completion +One of the experiments we are interested in doing is to compare how GPT-2 compares with GPT-3 on the text ccompletion task. So, we have prompted GPT-2 with a few sentence to see how it performs. See the notebook for details. + **Status:** Archive (code is provided as-is, no updates expected) From 7ae3ddbbe1acf7863fdb40877c32ce6f8b2f29fd Mon Sep 17 00:00:00 2001 From: Kazem Jahanbakhsh Date: Mon, 7 Sep 2020 23:47:29 -0700 Subject: [PATCH 7/7] Created using Colaboratory --- experiments.ipynb | 332 ++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 276 insertions(+), 56 deletions(-) diff --git a/experiments.ipynb b/experiments.ipynb index 47b6061e1..cc3567141 100644 --- a/experiments.ipynb +++ b/experiments.ipynb @@ -6,7 +6,7 @@ "name": "experiments.ipynb", "provenance": [], "collapsed_sections": [], - "authorship_tag": "ABX9TyOctNH+MKPbw7kjn3XjufvU", + "authorship_tag": "ABX9TyM8/GjxaW0gBevfV0bLgdIF", "include_colab_link": true }, "kernelspec": { @@ -22,7 +22,7 @@ "colab_type": "text" }, "source": [ - "\"Open" + "\"Open" ] }, { @@ -49,14 +49,14 @@ "base_uri": "https://localhost:8080/", "height": 1000 }, - "outputId": "ff208699-202b-4b51-e157-b4f49da5faf6" + "outputId": "2db82efb-1c2b-4e2d-aa22-c597a7aa55f7" }, "source": [ "!git clone https://github.com/ilopezfr/gpt-2/\n", "import os\n", "os.chdir('gpt-2')\n", "# !python download_model.py 117M\n", - "!python download_model.py 124M\n", + "# !python download_model.py 124M\n", "!python download_model.py 345M\n", "# !python download_model.py 774M\n", "# !python download_model.py 1558M\n", @@ -71,48 +71,41 @@ "Cloning into 'gpt-2'...\n", "remote: Enumerating objects: 310, done.\u001b[K\n", "remote: Total 310 (delta 0), reused 0 (delta 0), pack-reused 310\u001b[K\n", - "Receiving objects: 100% (310/310), 4.63 MiB | 5.34 MiB/s, done.\n", + "Receiving objects: 100% (310/310), 4.63 MiB | 17.70 MiB/s, done.\n", "Resolving deltas: 100% (174/174), done.\n", - "Fetching checkpoint: 1.00kit [00:00, 917kit/s] \n", - "Fetching encoder.json: 1.04Mit [00:00, 44.1Mit/s] \n", - "Fetching hparams.json: 1.00kit [00:00, 1.06Mit/s] \n", - "Fetching model.ckpt.data-00000-of-00001: 498Mit [00:08, 57.3Mit/s] \n", - "Fetching model.ckpt.index: 6.00kit [00:00, 4.85Mit/s] \n", - "Fetching model.ckpt.meta: 472kit [00:00, 
36.0Mit/s] \n", - "Fetching vocab.bpe: 457kit [00:00, 41.6Mit/s] \n", - "Fetching checkpoint: 1.00kit [00:00, 1.03Mit/s] \n", - "Fetching encoder.json: 1.04Mit [00:00, 37.2Mit/s] \n", - "Fetching hparams.json: 1.00kit [00:00, 920kit/s] \n", - "Fetching model.ckpt.data-00000-of-00001: 1.42Git [00:36, 38.9Mit/s] \n", - "Fetching model.ckpt.index: 11.0kit [00:00, 8.15Mit/s] \n", - "Fetching model.ckpt.meta: 927kit [00:00, 26.1Mit/s] \n", - "Fetching vocab.bpe: 457kit [00:00, 39.0Mit/s] \n", + "Fetching checkpoint: 1.00kit [00:00, 757kit/s] \n", + "Fetching encoder.json: 1.04Mit [00:00, 44.5Mit/s] \n", + "Fetching hparams.json: 1.00kit [00:00, 539kit/s] \n", + "Fetching model.ckpt.data-00000-of-00001: 1.42Git [00:20, 70.8Mit/s] \n", + "Fetching model.ckpt.index: 11.0kit [00:00, 7.30Mit/s] \n", + "Fetching model.ckpt.meta: 927kit [00:00, 52.6Mit/s] \n", + "Fetching vocab.bpe: 457kit [00:00, 44.9Mit/s] \n", "Collecting fire>=0.1.3\n", "\u001b[?25l Downloading https://files.pythonhosted.org/packages/34/a7/0e22e70778aca01a52b9c899d9c145c6396d7b613719cd63db97ffa13f2f/fire-0.3.1.tar.gz (81kB)\n", - "\u001b[K |████████████████████████████████| 81kB 4.3MB/s \n", + "\u001b[K |████████████████████████████████| 81kB 2.1MB/s \n", "\u001b[?25hCollecting regex==2018.1.10\n", "\u001b[?25l Downloading https://files.pythonhosted.org/packages/76/f4/7146c3812f96fcaaf2d06ff6862582302626a59011ccb6f2833bb38d80f7/regex-2018.01.10.tar.gz (612kB)\n", - "\u001b[K |████████████████████████████████| 614kB 14.2MB/s \n", + "\u001b[K |████████████████████████████████| 614kB 8.7MB/s \n", "\u001b[?25hCollecting requests==2.21.0\n", "\u001b[?25l Downloading https://files.pythonhosted.org/packages/7d/e3/20f3d364d6c8e5d2353c72a67778eb189176f08e873c9900e10c0287b84b/requests-2.21.0-py2.py3-none-any.whl (57kB)\n", - "\u001b[K |████████████████████████████████| 61kB 5.9MB/s \n", + "\u001b[K |████████████████████████████████| 61kB 6.1MB/s \n", "\u001b[?25hCollecting tqdm==4.31.1\n", "\u001b[?25l Downloading https://files.pythonhosted.org/packages/6c/4b/c38b5144cf167c4f52288517436ccafefe9dc01b8d1c190e18a6b154cd4a/tqdm-4.31.1-py2.py3-none-any.whl (48kB)\n", - "\u001b[K |████████████████████████████████| 51kB 5.4MB/s \n", + "\u001b[K |████████████████████████████████| 51kB 4.9MB/s \n", "\u001b[?25hRequirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from fire>=0.1.3->-r requirements.txt (line 1)) (1.15.0)\n", "Requirement already satisfied: termcolor in /usr/local/lib/python3.6/dist-packages (from fire>=0.1.3->-r requirements.txt (line 1)) (1.1.0)\n", - "Requirement already satisfied: urllib3<1.25,>=1.21.1 in /usr/local/lib/python3.6/dist-packages (from requests==2.21.0->-r requirements.txt (line 3)) (1.24.3)\n", - "Requirement already satisfied: chardet<3.1.0,>=3.0.2 in /usr/local/lib/python3.6/dist-packages (from requests==2.21.0->-r requirements.txt (line 3)) (3.0.4)\n", "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.6/dist-packages (from requests==2.21.0->-r requirements.txt (line 3)) (2020.6.20)\n", "Collecting idna<2.9,>=2.5\n", "\u001b[?25l Downloading https://files.pythonhosted.org/packages/14/2c/cd551d81dbe15200be1cf41cd03869a46fe7226e7450af7a6545bfc474c9/idna-2.8-py2.py3-none-any.whl (58kB)\n", - "\u001b[K |████████████████████████████████| 61kB 5.3MB/s \n", - "\u001b[?25hBuilding wheels for collected packages: fire, regex\n", + "\u001b[K |████████████████████████████████| 61kB 5.1MB/s \n", + "\u001b[?25hRequirement already satisfied: urllib3<1.25,>=1.21.1 in 
/usr/local/lib/python3.6/dist-packages (from requests==2.21.0->-r requirements.txt (line 3)) (1.24.3)\n", + "Requirement already satisfied: chardet<3.1.0,>=3.0.2 in /usr/local/lib/python3.6/dist-packages (from requests==2.21.0->-r requirements.txt (line 3)) (3.0.4)\n", + "Building wheels for collected packages: fire, regex\n", " Building wheel for fire (setup.py) ... \u001b[?25l\u001b[?25hdone\n", - " Created wheel for fire: filename=fire-0.3.1-py2.py3-none-any.whl size=111005 sha256=8999f1fb382b963552e7459269f1776a225e6eea22a078d8fa96fb6f59cfe6c0\n", + " Created wheel for fire: filename=fire-0.3.1-py2.py3-none-any.whl size=111005 sha256=45b42ba4f554359a806a079ead003fb8b32c0a2fa0775162bb75c23a5a1d7d39\n", " Stored in directory: /root/.cache/pip/wheels/c1/61/df/768b03527bf006b546dce284eb4249b185669e65afc5fbb2ac\n", " Building wheel for regex (setup.py) ... \u001b[?25l\u001b[?25hdone\n", - " Created wheel for regex: filename=regex-2018.1.10-cp36-cp36m-linux_x86_64.whl size=547978 sha256=f8e982a9ad237c3ee0e5416168dc653c6ae8ea27f40432edfa36c063329b0e72\n", + " Created wheel for regex: filename=regex-2018.1.10-cp36-cp36m-linux_x86_64.whl size=547978 sha256=a3908e86b83b4aed66e556cea59f52ec27cfec41056ac78b878b7c1c00f4bc02\n", " Stored in directory: /root/.cache/pip/wheels/74/17/3f/c77bba99efd74ba1a19862c9dd97f4b6d735e2826721dc00ff\n", "Successfully built fire regex\n", "\u001b[31mERROR: spacy 2.2.4 has requirement tqdm<5.0.0,>=4.38.0, but you'll have tqdm 4.31.1 which is incompatible.\u001b[0m\n", @@ -134,41 +127,41 @@ "Successfully installed fire-0.3.1 idna-2.8 regex-2018.1.10 requests-2.21.0 tqdm-4.31.1\n", "Collecting tensorflow==1.13.1\n", "\u001b[?25l Downloading https://files.pythonhosted.org/packages/77/63/a9fa76de8dffe7455304c4ed635be4aa9c0bacef6e0633d87d5f54530c5c/tensorflow-1.13.1-cp36-cp36m-manylinux1_x86_64.whl (92.5MB)\n", - "\u001b[K |████████████████████████████████| 92.5MB 103kB/s \n", + "\u001b[K |████████████████████████████████| 92.5MB 64kB/s \n", "\u001b[?25hRequirement already satisfied: numpy>=1.13.3 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (1.18.5)\n", - "Requirement already satisfied: protobuf>=3.6.1 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (3.12.4)\n", - "Requirement already satisfied: grpcio>=1.8.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (1.31.0)\n", - "Requirement already satisfied: wheel>=0.26 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (0.34.2)\n", - "Requirement already satisfied: keras-preprocessing>=1.0.5 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (1.1.2)\n", + "Requirement already satisfied: astor>=0.6.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (0.8.1)\n", + "Requirement already satisfied: six>=1.10.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (1.15.0)\n", + "Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (1.1.0)\n", + "Requirement already satisfied: wheel>=0.26 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (0.35.1)\n", "Collecting keras-applications>=1.0.6\n", "\u001b[?25l Downloading https://files.pythonhosted.org/packages/71/e3/19762fdfc62877ae9102edf6342d71b28fbfd9dea3d2f96a882ce099b03f/Keras_Applications-1.0.8-py3-none-any.whl (50kB)\n", - "\u001b[K |████████████████████████████████| 51kB 5.2MB/s \n", - "\u001b[?25hCollecting tensorflow-estimator<1.14.0rc0,>=1.13.0\n", - "\u001b[?25l 
Downloading https://files.pythonhosted.org/packages/bb/48/13f49fc3fa0fdf916aa1419013bb8f2ad09674c275b4046d5ee669a46873/tensorflow_estimator-1.13.0-py2.py3-none-any.whl (367kB)\n", - "\u001b[K |████████████████████████████████| 368kB 58.0MB/s \n", - "\u001b[?25hRequirement already satisfied: six>=1.10.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (1.15.0)\n", - "Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (1.1.0)\n", + "\u001b[K |████████████████████████████████| 51kB 5.9MB/s \n", + "\u001b[?25hRequirement already satisfied: protobuf>=3.6.1 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (3.12.4)\n", + "Requirement already satisfied: grpcio>=1.8.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (1.31.0)\n", + "Requirement already satisfied: keras-preprocessing>=1.0.5 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (1.1.2)\n", "Collecting tensorboard<1.14.0,>=1.13.0\n", "\u001b[?25l Downloading https://files.pythonhosted.org/packages/0f/39/bdd75b08a6fba41f098b6cb091b9e8c7a80e1b4d679a581a0ccd17b10373/tensorboard-1.13.1-py3-none-any.whl (3.2MB)\n", - "\u001b[K |████████████████████████████████| 3.2MB 57.7MB/s \n", - "\u001b[?25hRequirement already satisfied: absl-py>=0.1.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (0.9.0)\n", - "Requirement already satisfied: astor>=0.6.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (0.8.1)\n", - "Requirement already satisfied: gast>=0.2.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (0.3.3)\n", - "Requirement already satisfied: setuptools in /usr/local/lib/python3.6/dist-packages (from protobuf>=3.6.1->tensorflow==1.13.1) (49.2.0)\n", + "\u001b[K |████████████████████████████████| 3.2MB 46.5MB/s \n", + "\u001b[?25hRequirement already satisfied: gast>=0.2.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (0.3.3)\n", + "Collecting tensorflow-estimator<1.14.0rc0,>=1.13.0\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/bb/48/13f49fc3fa0fdf916aa1419013bb8f2ad09674c275b4046d5ee669a46873/tensorflow_estimator-1.13.0-py2.py3-none-any.whl (367kB)\n", + "\u001b[K |████████████████████████████████| 368kB 44.4MB/s \n", + "\u001b[?25hRequirement already satisfied: absl-py>=0.1.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.13.1) (0.8.1)\n", "Requirement already satisfied: h5py in /usr/local/lib/python3.6/dist-packages (from keras-applications>=1.0.6->tensorflow==1.13.1) (2.10.0)\n", - "Collecting mock>=2.0.0\n", - " Downloading https://files.pythonhosted.org/packages/cd/74/d72daf8dff5b6566db857cfd088907bb0355f5dd2914c4b3ef065c790735/mock-4.0.2-py3-none-any.whl\n", + "Requirement already satisfied: setuptools in /usr/local/lib/python3.6/dist-packages (from protobuf>=3.6.1->tensorflow==1.13.1) (49.6.0)\n", "Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.6/dist-packages (from tensorboard<1.14.0,>=1.13.0->tensorflow==1.13.1) (1.0.1)\n", "Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.6/dist-packages (from tensorboard<1.14.0,>=1.13.0->tensorflow==1.13.1) (3.2.2)\n", + "Collecting mock>=2.0.0\n", + " Downloading https://files.pythonhosted.org/packages/cd/74/d72daf8dff5b6566db857cfd088907bb0355f5dd2914c4b3ef065c790735/mock-4.0.2-py3-none-any.whl\n", "Requirement already satisfied: importlib-metadata; python_version < \"3.8\" in 
/usr/local/lib/python3.6/dist-packages (from markdown>=2.6.8->tensorboard<1.14.0,>=1.13.0->tensorflow==1.13.1) (1.7.0)\n", "Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.6/dist-packages (from importlib-metadata; python_version < \"3.8\"->markdown>=2.6.8->tensorboard<1.14.0,>=1.13.0->tensorflow==1.13.1) (3.1.0)\n", - "Installing collected packages: keras-applications, mock, tensorflow-estimator, tensorboard, tensorflow\n", - " Found existing installation: tensorflow-estimator 2.3.0\n", - " Uninstalling tensorflow-estimator-2.3.0:\n", - " Successfully uninstalled tensorflow-estimator-2.3.0\n", + "Installing collected packages: keras-applications, tensorboard, mock, tensorflow-estimator, tensorflow\n", " Found existing installation: tensorboard 2.3.0\n", " Uninstalling tensorboard-2.3.0:\n", " Successfully uninstalled tensorboard-2.3.0\n", + " Found existing installation: tensorflow-estimator 2.3.0\n", + " Uninstalling tensorflow-estimator-2.3.0:\n", + " Successfully uninstalled tensorflow-estimator-2.3.0\n", " Found existing installation: tensorflow 2.3.0\n", " Uninstalling tensorflow-2.3.0:\n", " Successfully uninstalled tensorflow-2.3.0\n", @@ -208,7 +201,7 @@ "source": [ "!python3 src/generate_unconditional_samples.py --model_name='124M' --nsamples=2 --top_k=40 --temperature=0.7 | tee samples" ], - "execution_count": 2, + "execution_count": null, "outputs": [ { "output_type": "stream", @@ -365,7 +358,7 @@ "source": [ "!python3 src/interactive_conditional_samples.py --model_name='345M' --nsamples=2 --top_k=100 --temperature=1" ], - "execution_count": 3, + "execution_count": null, "outputs": [ { "output_type": "stream", @@ -506,6 +499,233 @@ } ] }, + { + "cell_type": "code", + "metadata": { + "id": "0egWJOzm5Asz", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "8768f85e-93c1-4447-9d08-732765043d99" + }, + "source": [ + "!python3 src/interactive_conditional_samples.py --model_name='345M' --nsamples=2 --top_k=100 --temperature=1" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:526: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint8 = np.dtype([(\"qint8\", np.int8, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:527: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_quint8 = np.dtype([(\"quint8\", np.uint8, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:528: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint16 = np.dtype([(\"qint16\", np.int16, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:529: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_quint16 = np.dtype([(\"quint16\", np.uint16, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:530: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a 
future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " _np_qint32 = np.dtype([(\"qint32\", np.int32, 1)])\n", + "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/dtypes.py:535: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", + " np_resource = np.dtype([(\"resource\", np.ubyte, 1)])\n", + "2020-09-08 04:08:35.731726: I tensorflow/core/platform/cpu_feature_guard.cc:141] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2 FMA\n", + "2020-09-08 04:08:35.735168: I tensorflow/core/platform/profile_utils/cpu_utils.cc:94] CPU Frequency: 2300000000 Hz\n", + "2020-09-08 04:08:35.735378: I tensorflow/compiler/xla/service/service.cc:150] XLA service 0x29527e0 executing computations on platform Host. Devices:\n", + "2020-09-08 04:08:35.735411: I tensorflow/compiler/xla/service/service.cc:158] StreamExecutor device (0): , \n", + "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Colocations handled automatically by placer.\n", + "WARNING:tensorflow:From /content/gpt-2/src/sample.py:64: to_float (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use tf.cast instead.\n", + "WARNING:tensorflow:From /content/gpt-2/src/sample.py:67: multinomial (from tensorflow.python.ops.random_ops) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use tf.random.categorical instead.\n", + "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/training/saver.py:1266: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use standard file APIs to check for files with this prefix.\n", + "Model prompt >>> I tried to eat my soup with fork.\n", + "======================================== SAMPLE 1 ========================================\n", + "\n", + "\n", + "「Oooh.」\n", + "\n", + "「Th, that's, tooru, really?」\n", + "\n", + "She answered with a smile on her face, the soup didn't have any sound but since I thought that she was having an odd smile, I swallowed it all!\n", + "\n", + "「It's so good…..」\n", + "\n", + "「I made it. After eating it, did you enjoy it? If you only want to live until tomorrow do you want to eat it?」\n", + "\n", + "Her expression suddenly turned serious. You could tell that she was facing a bad situation,\n", + "\n", + "「That is, as well as because it's very rare for the house elf's personality to get out. Because I'm actually alone and only have to eat once in a while so there's no problem.」\n", + "\n", + "「Is that so……isn't it strange then──」\n", + "\n", + "She looked confused by my reaction.\n", + "\n", + "「I hear you won't be able to take this easy tomorrow. Do you think your house elves will act too gentle toward you?」\n", + "\n", + "「What are you saying──」\n", + "\n", + "「It's strange too. 
Because I'm home while they're not, so their feelings are kinda different as well even though their personalities are still to be guessed from those expressions.」\n", + "\n", + "「Aren't we talking about a girl……?」\n", + "\n", + "Walking over to her, I saw that she was holding a big spoon that she was spoon feeding but even though I wanted to eat now so I took it away.\n", + "\n", + "I'm not sure if it's because her hunger was high while my eyes were wet, or if she ate the whole spoon before but her mouth and nose looked delicious.\n", + "\n", + "「It sure is. You know that delicious bitter taste when you eat them, right?」\n", + "\n", + "「Oh I've met those little goblins」\n", + "\n", + "When was this?\n", + "\n", + "I didn't feel anything at all due to that but she seemed to be getting upset.\n", + "\n", + "「Oh I like their cute faces and their weird shape and their way to move their whole body. So their clothes looks like 『Candy』as well so this is nothing for me to say back.」\n", + "\n", + "Her hands had held the big spoon and it was starting to get cold. If you're facing someone with a big spoon, it's the least you could do, is it? Asking what to do I started to scratch her legs and chest.\n", + "\n", + "「I see. So I just have to talk with them every once in\n", + "======================================== SAMPLE 2 ========================================\n", + " It looked like meat stew.\" His mouth was wet with wet tears. It seemed like half of his body had been eaten away by meat.\n", + "\n", + "After seeing how Ye Feng was crying, An Rong Yi took out a bag of beans and poured them into his mouth.\n", + "\n", + "\"Let go of me! Your body must be truly strong, that's why you dared throw away my three wives and one son to eat such a disgusting old man's meat soup!\" An Rong Yi's eyes trembled, he had never seen such an interesting expression before.\n", + "\n", + "\"Ai, why are you crying? I already broke my teeth yesterday!\" Ye Feng was actually even more infuriated than An Rong Yi!\n", + "\n", + "Since he can still make use of his three wives and one son, he would definitely not pay her any attention once news of this had flooded his mind.\n", + "\n", + "An Rong Yi was quite a powerful individual. He would be a rare celebrity!\n", + "\n", + "Then, why then did his body seemed to have been devoured by meat? He did not even have any water left in it. This must be why An Rong Yi had no other choice but to eat a bowl of bones.\n", + "\n", + "He seemed to be deeply dazed but as soon as he saw the way Ye Feng's face looked, he would immediately think back.\n", + "\n", + "An Rong Yi continued to mutter to himself: \"Hey, how come so much water leaked out when I turned a furnace? Your mouth is really weak!\"\n", + "\n", + "\"This body really deserves to eat this stuff. I am not satisfied with just three men. Now I want to eat all of them to become strong…!\" Ye Feng was all set from the start. When they were on the beach, there were some things that he had to be aware of, for example, which person would take away two of them to eat.\n", + "\n", + "Ye Feng felt very hungry, so he thought: \"I have only a minute before I am going to drink my wine and get ready to leave!\"\n", + "\n", + "An Rong Yi was at complete loss.\n", + "\n", + "Ye Feng was standing at the edge of the beach, smiling faintly, then ran out once again.\n", + "\n", + "While looking at An Rong Yi walking away, he had no appetite at all. 
However, she was actually making use of everything of the five of them that she had in her possession. Thus, even if it had been ten men, she wanted the maximum amount of everything they could work with\n", + "================================================================================\n", + "Model prompt >>> I tried to eat my soup with fork.\n", + "======================================== SAMPLE 1 ========================================\n", + " However, I find nothing better.\" [2] No one can taste the soup that he ate in his apartment from the picture he took of it, because the person who had eaten his soup found no way of giving him another bite.\n", + "\n", + "Reimu may not be perfect in every aspect, but the truth is that she remains extremely trustworthy, yet she has more than enough room to change, if she wanted to. Even if it's in that one single person incident, she has enough capacity to endure many things. Thus, she continues to maintain her cool despite how she feels.\n", + "\n", + "If someone else was to suddenly attack the manor today, she would make a strong response, she says. Reimu could feel a certain amount of danger on the outside, but inside the mansion, she thought that it might be just someone stepping on her toes. The reason she felt that way is twofold: When she was trying to hide the truth from Touya, she got that kind of situation from him because of him.\n", + "\n", + "The reason, is probably because she knows that she is the woman who has received this kind of death threat numerous times from others, and knows that this person might know who she was and might be afraid of her (trying to hide the face of danger from others). Reimu may have suffered that situation with Mephius before, but she's still very aware of the possibility that someone might come against her. There are many risks in running around the city, in fact, no one will stop her from attempting to meet any of them. Moreover, how can she honestly tell Reimu not to fear?\n", + "\n", + "That being the case, the key to the path of Reimu is that she always sticks to her words when asked to by Touya, and doesn't let this person who is the worst threat as her enemy's equal out of fear.\n", + "\n", + "Even so, since she's the one who has served Mariya and is now in a position to handle this man's case, even if Reimu is always in the forefront of Mariya's mind, it wouldn't result in a perfect path. That's because she absolutely cannot get rid of Touya all at once, but she wants to try her best. Which makes it look like it was all designed to make her fear and hate this man out of fear and disgust. Even so, since Reimu seems to have calmed down a lot, even that is far from the truth.\n", + "\n", + "To understand the\n", + "======================================== SAMPLE 2 ========================================\n", + " The soup was quite sour and sticky.\"\n", + "\n", + "A report of the incident in the British journal Nature warns of problems when it comes to animal exploitation. The editors of PNAS warn that \"[t]he animal parts in question may be highly diseased. 
… Human consumption has led to a surge in meat-like substances, such as flesh or bone, and there is likely to be a demand for animal parts in other contexts, thus supporting further growth in the stock.\" Some other scientists have warned that the animal flesh often ends up in pet food and cosmetics because most Americans don't understand the difference between a fowl's brain and its fat.<|endoftext|>A student at McGill University has been temporarily suspended for causing stress to a fellow student by claiming he's a \"big boy\" who only \"has one dick.\"\n", + "\n", + "I don't have a d-cup on. They're pretty hard to hide. - Alex Schulz, McGill student In Facebook comments, McGill student Alex Schulz referred to himself as a \"dork\" and \"big boy,\" according to the student's own defence document. Although Schulz denies hurting or antagonising an individual, an investigator determined that he could face a sanction because of an aggressive tone of voice and some online communication that violated university policies.\n", + "\n", + "\"I can understand why you might think I'm a big boy who only has one dick, but I actually have a pretty amazing vagina,\" Schulz wrote on March 11 prior to the March 15 event. \"Your statement that you 'only have one dick' is racist. I've lost two dates because of that.\" On the post, Schulz argued that McGill people act like \"big boys,\" saying, \"That is not fair.\" He also asked the administrator to \"have at me with ass all day, while I am not.\"\n", + "\n", + "A McGill student accused of using profanity and insulting a fellow McGill student's sexual orientation appeared in court Monday morning. ( Richard Lautens / Toronto Star )\n", + "\n", + "In court Monday, Schulz was accompanied by an associate professor of linguistics and a fellow McGill student, who also said Schulz used profane language and made \"some really scary\" promises in an exchange that began as an online conversation. The student insisted Schulz needed, essentially, to agree to be in class alongside friends the next day, on this account being a real-life joke, the two denied making up. 
But a former professor of physical or mathematical literature presented evidence to the court that Schulz's threats of violence were actual, in\n", + "================================================================================\n", + "Model prompt >>> A train carriage containing bananas was stolen today.\n", + "======================================== SAMPLE 1 ========================================\n", + " Picture: Fairfax Australia\n", + "\n", + "Three containers of goods have been recovered and officers say they believe the theft was targeted.\n", + "\n", + "\"Thankfully we have recovered all three packages,\" Acting Supervising Assistant Commissioner David Jones told reporters at the station here.\n", + "\n", + "\"After talking to members of the public there is some indication not all of the stolen items are linked.\"\n", + "\n", + "Police confirm three shopping containers and a suitcase were stolen from a train carriage carrying bananas today, August 8.\n", + "\n", + "The incident is still under investigation, but no-one was injured.\n", + "\n", + "Topics: crime, animal-attacks, melbourne-3000, vic, australia\n", + "\n", + "First posted<|endoftext|>You have requested the file:\n", + "\n", + "Name: 16bit Sink.rar\n", + "\n", + "Size: 28.26 MB\n", + "\n", + "Uploaded: 25-05-2016 07:47\n", + "\n", + "Last download: 24-09-2018 21:66\n", + "\n", + "Advertisement Zippyshare.com News: Possible temporary availability interruptions\n", + "\n", + "03 Jul 2018 16:13 [Warning] We need to change the firmware for our aggregation switch and replace all the top of rack switches. It can be bumpy ride today and tomorrow. *this message will self-destruct within a few ...\n", + "\n", + "HTTPS/SSL activation\n", + "\n", + "03 Apr 2018 20:48 Upload/Download has been moved to the https/ssl protocol. Everything should work stable now. Please report any encountered bugs. ...\n", + "\n", + "Zippyuploader update, Docs and Videos encoding performance boost\n", + "\n", + "03 Dec 2017 23:38 Hey folks, A quick update: - Today a new version (0.0.16.0) of the Zippyuploader was released. The only change is related to an increased file size limit of 500MB. If something ...\n", + "\n", + "To upload a file just follow these simple steps: Benefits of using Zippyshare: 1) Select a file to send by clicking the \"Browse\" button. You can then select photos, audio, video, documents or anything else you want to send. The maximum file size is 500 MB.\n", + "\n", + "\n", + "2) Click the \"Start Upload\" button to spend a maximum of 10 minutes a day on the download store. You will receive a unique link to the download site, which you can place anywhere: on your homepage, blog, forum or send it via IM or e-mail to your friends.\n", + "\n", + "\n", + "3) After a succesfull upload you'll obtain a\n", + "======================================== SAMPLE 2 ========================================\n", + " The car was caught between two cars in Dump Road in Wollongong, just outside Darlinghurst. 
The banana is believed to be among two dozen stolen.\n", + "\n", + "The stolen shipment was on its way to a Melbourne breakfast shop, between Darlinghurst and Alice Springs, after customs at Christchurch, when the woman noticed something was strange.\n", + "\n", + "\"I went outside and saw some of the bananas at the front door,\" Ms Clements said.\n", + "\n", + "\"Then I saw the car and my guess was that the thief had taken a banana between two cars and placed it outside.\"\n", + "\n", + "While the banana can be used to make banana bread, on its way to Melbourne it might have ended up as a container of grease.\n", + "\n", + "The owner reported a suspicious incident to police who discovered another car worth between $90,000 and $1.3 million was stolen and stolen-from Victoria around the same time. The third man described from a different car involved his banana went missing but a footie case was discovered.\n", + "\n", + "Ms Clements said her colleague, who was sitting in the kitchen while she looked after the banana, noticed the banana was missing and called for help.\n", + "\n", + "They returned to the scene to check it out but their first guess was that someone was inside.\n", + "\n", + "Detective Assistant Commissioner James Davis told reporters in Melbourne he wanted to speak to Ms Clements, who is from the Hawkesbury area of NSW.\n", + "\n", + "\"What happened has the largest implications for us who will be working in Victoria and we want to speak to her,\" Mr Davis said.\n", + "\n", + "\"I encourage all victims to contact police,\" Ms Clements said.<|endoftext|>An artist's original draft of The Matrix, showing a computer built to steal memories, and then re-program and re-transform the mind; this concept from Dr. Seuss is an excerpt from The Adventures of Tintin , another illustration by Lizzie Farrar & Edette Ladd ©2005 copyright ©1992 Robert K. Harrison all rights reserved.\n", + "\n", + "A version of this article is available in the print edition of Computer Log (subscription required) or online at http://www.computerlog.org/archives/2005/08/21/robert_k-hanuman.shtml. Further information on the release of Computer Log is available at http://www.computerlog.org.<|endoftext|>Harmony singer Kevin MacLeod (obscure), songwriter and engineer from Canada; once put out a song, \"Breat\n", + "================================================================================\n", + "Model prompt >>> " + ], + "name": "stdout" + } + ] + }, { "cell_type": "markdown", "metadata": { @@ -568,7 +788,7 @@ "source": [ "!python3 src/interactive_conditional_samples.py --model_name='345M' --nsamples=10 --top_k=40 --temperature=.80 --length=1" ], - "execution_count": 4, + "execution_count": null, "outputs": [ { "output_type": "stream", @@ -714,7 +934,7 @@ "source": [ "!python3 src/interactive_conditional_samples.py --model_name='345M' --nsamples=3 --length=100 --temperature=1 " ], - "execution_count": 5, + "execution_count": null, "outputs": [ { "output_type": "stream", @@ -830,7 +1050,7 @@ "source": [ "!python3 src/interactive_conditional_samples.py --model_name='345M' --nsamples=3 --temperature=1" ], - "execution_count": 6, + "execution_count": null, "outputs": [ { "output_type": "stream",