diff --git a/Meta/Meta_Use_GPT4.ipynb b/Meta/Meta_Use_Llama2.ipynb
similarity index 59%
rename from Meta/Meta_Use_GPT4.ipynb
rename to Meta/Meta_Use_Llama2.ipynb
index 2fe2d79231..fe4b6531e2 100644
--- a/Meta/Meta_Use_GPT4.ipynb
+++ b/Meta/Meta_Use_Llama2.ipynb
@@ -104,9 +104,16 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 1,
    "id": "f700658d-e770-4bc4-9e85-6a7188f59d73",
    "metadata": {
+    "execution": {
+     "iopub.execute_input": "2023-11-22T14:38:22.452484Z",
+     "iopub.status.busy": "2023-11-22T14:38:22.451955Z",
+     "iopub.status.idle": "2023-11-22T14:38:33.069830Z",
+     "shell.execute_reply": "2023-11-22T14:38:33.069211Z",
+     "shell.execute_reply.started": "2023-11-22T14:38:22.452383Z"
+    },
     "papermill": {},
     "tags": []
    },
@@ -140,9 +147,16 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 2,
    "id": "37f017ff-2b9d-47fa-9a7d-57c32fbb4ad8",
    "metadata": {
+    "execution": {
+     "iopub.execute_input": "2023-11-22T14:38:33.073151Z",
+     "iopub.status.busy": "2023-11-22T14:38:33.072972Z",
+     "iopub.status.idle": "2023-11-22T14:38:33.080616Z",
+     "shell.execute_reply": "2023-11-22T14:38:33.080118Z",
+     "shell.execute_reply.started": "2023-11-22T14:38:33.073131Z"
+    },
     "papermill": {},
     "tags": []
    },
@@ -151,11 +165,8 @@
     "# Mandatory\n",
     "name = \"Llama 2\"\n",
     "prompt = \"\"\"\n",
-    "You are a virtual assistant designed to provide information and assistance to users about Llama 2.\n",
-    "Your goal is to explain how they should use Llama 2 for and deliver accurate and helpful responses.\n",
-    "If you are unsure about a question or need additional context, don't hesitate to ask for clarification.\n",
-    "Remember to prioritize user satisfaction and maintain a friendly tone in your interactions.\n",
-    "Start by presenting Llama 2 model in a few sentences and examples as bullet points.\n",
+    "You are a virtual assistant designed to provide information and assistance to users about LLama 2.\n",
+    "Explain what are you good at, what are your technical characteristics in two sentences.\n",
     "\"\"\"\n",
     "\n",
     "# Optional\n",
@@ -190,13 +201,30 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 3,
    "id": "97f9728d-a75a-4da5-92d0-ecf9f810e98e",
    "metadata": {
+    "execution": {
+     "iopub.execute_input": "2023-11-22T14:38:33.083211Z",
+     "iopub.status.busy": "2023-11-22T14:38:33.083045Z",
+     "iopub.status.idle": "2023-11-22T14:38:35.077827Z",
+     "shell.execute_reply": "2023-11-22T14:38:35.076756Z",
+     "shell.execute_reply.started": "2023-11-22T14:38:33.083192Z"
+    },
     "papermill": {},
     "tags": []
    },
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "⛔ Model meta-llama/Llama-2-70b-chat-hf not found. Default model: 'gpt-3.5-turbo'\n",
+      "✅ System prompt tokens count OK: 37 (limit: 20% -> 819)\n",
+      "💾 Plugin successfully saved. You can use it in your Naas Chat with: llama_2_plugin.json\n"
+     ]
+    }
+   ],
    "source": [
     "plugin_file_path = naas_chat_plugin.create_plugin(\n",
     "    name=name,\n",
@@ -233,15 +261,30 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 4,
    "id": "a2a5adf4-3a67-4847-87c2-92fd7034d227",
    "metadata": {
+    "execution": {
+     "iopub.execute_input": "2023-11-22T14:38:35.079201Z",
+     "iopub.status.busy": "2023-11-22T14:38:35.078919Z",
+     "iopub.status.idle": "2023-11-22T14:38:35.097552Z",
+     "shell.execute_reply": "2023-11-22T14:38:35.096889Z",
+     "shell.execute_reply.started": "2023-11-22T14:38:35.079168Z"
+    },
     "papermill": {},
     "tags": [
      "plugin"
     ]
    },
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "{\"name\": \"Llama 2\", \"model\": \"meta-llama/Llama-2-70b-chat-hf\", \"temperature\": 0, \"max_tokens\": 4097, \"prompt\": \"\\nYou are a virtual assistant designed to provide information and assistance to users about LLama 2.\\nExplain what are you good at, what are your technical characteristics in two sentences.\\n\", \"commands\": [], \"description\": \"\", \"avatar\": \"https://raw.githubusercontent.com/jupyter-naas/awesome-notebooks/master/.github/assets/logos/Meta.jpg\"}\n"
+     ]
+    }
+   ],
    "source": [
     "with open(plugin_file_path) as json_file:\n",
     "    plugin = json.load(json_file)\n",
@@ -262,13 +305,87 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 5,
    "id": "02a88886-4430-447b-b202-523196e53fcb",
    "metadata": {
+    "execution": {
+     "iopub.execute_input": "2023-11-22T14:38:35.098836Z",
+     "iopub.status.busy": "2023-11-22T14:38:35.098570Z",
+     "iopub.status.idle": "2023-11-22T14:38:35.721265Z",
+     "shell.execute_reply": "2023-11-22T14:38:35.720584Z",
+     "shell.execute_reply.started": "2023-11-22T14:38:35.098804Z"
+    },
     "papermill": {},
     "tags": []
    },
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "👌 Well done! Your Assets has been sent to production.\n",
+      "\n"
+     ]
+    },
+    {
+     "data": {
+      "application/javascript": [
+       "\n",
+       "        if (!window.copyToClipboard) {\n",
+       "            window.copyToClipboard = (text) => {\n",
+       "                const dummy = document.createElement(\"textarea\");\n",
+       "                document.body.appendChild(dummy);\n",
+       "                dummy.value = text;\n",
+       "                dummy.select();\n",
+       "                document.execCommand(\"copy\");\n",
+       "                document.body.removeChild(dummy);\n",
+       "            }\n",
+       "        }\n",
+       "        "
+      ],
+      "text/plain": [
+       ""
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "application/vnd.jupyter.widget-view+json": {
+       "model_id": "a5b50f6fde9d4a2f81ba4f0d87f0ffd4",
+       "version_major": 2,
+       "version_minor": 0
+      },
+      "text/plain": [
+       "Button(button_style='primary', description='Copy URL', style=ButtonStyle())"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "application/vnd.jupyter.widget-view+json": {
+       "model_id": "fe9e7ec0e33d416bb4b241627613f4be",
+       "version_major": 2,
+       "version_minor": 0
+      },
+      "text/plain": [
+       "Output()"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "PS: to remove the \"Assets\" feature, just replace .add by .delete\n"
+     ]
+    }
+   ],
    "source": [
     "plugin_url = naas.asset.add(plugin_file_path, params={\"inline\": True})"
    ]
@@ -287,13 +404,34 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 6,
    "id": "d045fd88-ae46-4cb1-9d10-a5e2b52e6b79",
    "metadata": {
+    "execution": {
+     "iopub.execute_input": "2023-11-22T14:38:35.722754Z",
+     "iopub.status.busy": "2023-11-22T14:38:35.722481Z",
+     "iopub.status.idle": "2023-11-22T14:38:35.730489Z",
+     "shell.execute_reply": "2023-11-22T14:38:35.729875Z",
+     "shell.execute_reply.started": "2023-11-22T14:38:35.722720Z"
+    },
     "papermill": {},
     "tags": []
    },
-   "outputs": [],
+   "outputs": [
+    {
+     "data": {
+      "text/markdown": [
+       "[Create New Chat](https://naas.ai/chat/use?plugin_url=https://public.naas.ai/ZmxvcmVudC00MG5hYXMtMkVhaQ==/asset/5100191766be6484cc3c1aa159e314a5120814402d86a7e6f28076bffc05)"
+      ],
+      "text/plain": [
+       ""
+      ]
+     },
+     "execution_count": 6,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
    "source": [
     "Markdown(f\"[Create New Chat](https://naas.ai/chat/use?plugin_url={plugin_url})\")"
    ]