diff --git a/00_core.ipynb b/00_core.ipynb index 725daf6..1e26503 100644 --- a/00_core.ipynb +++ b/00_core.ipynb @@ -200,7 +200,7 @@ { "data": { "text/plain": [ - "Message(id='msg_01FGnZXGWy9YJirXgTr2Dvtw', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. How are you doing today? Is there anything in particular you'd like to chat about or any questions you have?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=Usage(input_tokens=10, output_tokens=36))" + "Message(id='msg_019qLJxay5HTSe8krZkYDgoV', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. Is there anything I can help you with today?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=Usage(input_tokens=10, output_tokens=23))" ] }, "execution_count": null, @@ -282,7 +282,7 @@ { "data": { "text/plain": [ - "TextBlock(text=\"Hello Jeremy! It's nice to meet you. How are you doing today? Is there anything in particular you'd like to chat about or any questions you have?\", type='text')" + "TextBlock(text=\"Hello Jeremy! It's nice to meet you. Is there anything I can help you with today?\", type='text')" ] }, "execution_count": null, @@ -326,7 +326,7 @@ { "data": { "text/plain": [ - "\"Hello Jeremy! It's nice to meet you. How are you doing today? Is there anything in particular you'd like to chat about or any questions you have?\"" + "\"Hello Jeremy! It's nice to meet you. Is there anything I can help you with today?\"" ] }, "execution_count": null, @@ -375,23 +375,23 @@ { "data": { "text/markdown": [ - "Hello Jeremy! It's nice to meet you. How are you doing today? Is there anything in particular you'd like to chat about or any questions you have?\n", + "Hello Jeremy! It's nice to meet you. Is there anything I can help you with today?\n", "\n", "
\n", "\n", - "- id: `msg_01FGnZXGWy9YJirXgTr2Dvtw`\n", - "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. How are you doing today? Is there anything in particular you'd like to chat about or any questions you have?\", 'type': 'text'}]`\n", + "- id: `msg_019qLJxay5HTSe8krZkYDgoV`\n", + "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. Is there anything I can help you with today?\", 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 10, 'output_tokens': 36}`\n", + "- usage: `{'input_tokens': 10, 'output_tokens': 23}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01FGnZXGWy9YJirXgTr2Dvtw', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. How are you doing today? Is there anything in particular you'd like to chat about or any questions you have?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=Usage(input_tokens=10, output_tokens=36))" + "Message(id='msg_019qLJxay5HTSe8krZkYDgoV', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. Is there anything I can help you with today?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=Usage(input_tokens=10, output_tokens=23))" ] }, "execution_count": null, @@ -422,7 +422,7 @@ { "data": { "text/plain": [ - "Usage(input_tokens=10, output_tokens=36)" + "Usage(input_tokens=10, output_tokens=23)" ] }, "execution_count": null, @@ -548,7 +548,7 @@ { "data": { "text/plain": [ - "In: 10; Out: 36; Total: 46" + "In: 10; Out: 23; Total: 33" ] }, "execution_count": null, @@ -591,7 +591,7 @@ { "data": { "text/plain": [ - "In: 20; Out: 72; Total: 92" + "In: 20; Out: 46; Total: 66" ] }, "execution_count": null, @@ -682,12 +682,12 @@ { "data": { "text/markdown": [ - "Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to chat about or any questions you have?\n", + "Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\n", "\n", "
\n", "\n", - "- id: `msg_01Niya8iaTEcjrWhYs9FjbgM`\n", - "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to chat about or any questions you have?\", 'type': 'text'}]`\n", + "- id: `msg_017RAP4TephYyTFyKyfhsUxk`\n", + "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", @@ -698,7 +698,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01Niya8iaTEcjrWhYs9FjbgM', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to chat about or any questions you have?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 36; Total: 46)" + "Message(id='msg_017RAP4TephYyTFyKyfhsUxk', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 36; Total: 46)" ] }, "execution_count": null, @@ -760,7 +760,7 @@ "text/plain": [ "[{'role': 'user', 'content': \"I'm Jeremy\"},\n", " {'role': 'assistant',\n", - " 'content': [TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to chat about or any questions you have?\", type='text')]},\n", + " 'content': [TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", type='text')]},\n", " {'role': 'user', 'content': 'I forgot my name. Can you remind me please?'}]" ] }, @@ -795,7 +795,7 @@ "\n", "
\n", "\n", - "- id: `msg_01Tk8ik3NgHD2Aqf82eTTyje`\n", + "- id: `msg_01YWeAebsvvgLXSfy1HgZbNK`\n", "- content: `[{'text': 'Of course! You just told me that your name is Jeremy.', 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", @@ -807,7 +807,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01Tk8ik3NgHD2Aqf82eTTyje', content=[TextBlock(text='Of course! You just told me that your name is Jeremy.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 60; Out: 16; Total: 76)" + "Message(id='msg_01YWeAebsvvgLXSfy1HgZbNK', content=[TextBlock(text='Of course! You just told me that your name is Jeremy.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 60; Out: 16; Total: 76)" ] }, "execution_count": null, @@ -940,6 +940,25 @@ "id": "6520a355", "metadata": {}, "outputs": [], + "source": [ + "#| exports\n", + "@patch\n", + "def _log(self:Client, final, prefill, msgs, maxtok=None, sp=None, temp=None, stream=None, stop=None, **kwargs):\n", + " self._r(final, prefill)\n", + " if self.log is not None: self.log.append({\n", + " \"msgs\": msgs, \"prefill\": prefill, **kwargs,\n", + " \"msgs\": msgs, \"prefill\": prefill, \"maxtok\": maxtok, \"sp\": sp, \"temp\": temp, \"stream\": stream, \"stop\": stop, **kwargs,\n", + " \"result\": self.result, \"use\": self.use, \"stop_reason\": self.stop_reason, \"stop_sequence\": self.stop_sequence\n", + " })\n", + " return self.result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6901dce2", + "metadata": {}, + "outputs": [], "source": [ "#| exports\n", "@patch\n", @@ -947,11 +966,7 @@ " with self.c.messages.stream(model=self.model, messages=mk_msgs(msgs), **kwargs) as s:\n", " if prefill: yield(prefill)\n", " yield from s.text_stream\n", - " self._r(s.get_final_message(), prefill)\n", - " if self.log is not None: self.log.append({\n", - " \"msgs\": msgs, \"prefill\": prefill, **kwargs,\n", - " \"result\": self.result, \"use\": self.use, \"stop_reason\": self.stop_reason, \"stop_sequence\": self.stop_sequence\n", - " })" + " self._log(s.get_final_message(), prefill, msgs, **kwargs)" ] }, { @@ -968,6 +983,25 @@ "id": "835638bb", "metadata": {}, "outputs": [], + "source": [ + "#| exports\n", + "@patch\n", + "def _precall(self:Client, msgs, prefill, stop, kwargs):\n", + " pref = [prefill.strip()] if prefill else []\n", + " if not isinstance(msgs,list): msgs = [msgs]\n", + " if stop is not None:\n", + " if not isinstance(stop, (list)): stop = [stop]\n", + " kwargs[\"stop_sequences\"] = stop\n", + " msgs = mk_msgs(msgs+pref)\n", + " return msgs" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c21c2113", + "metadata": {}, + "outputs": [], "source": [ "#| exports\n", "@patch\n", @@ -982,21 +1016,11 @@ " stop=None, # Stop sequence\n", " **kwargs):\n", " \"Make a call to Claude.\"\n", - " pref = [prefill.strip()] if prefill else []\n", - " if not isinstance(msgs,list): msgs = [msgs]\n", - " if stop is not None:\n", - " if not isinstance(stop, (list)): stop = [stop]\n", - " kwargs[\"stop_sequences\"] = stop\n", - " msgs = mk_msgs(msgs+pref)\n", + " msgs = self._precall(msgs, prefill, stop, kwargs)\n", " if stream: return self._stream(msgs, prefill=prefill, max_tokens=maxtok, system=sp, temperature=temp, **kwargs)\n", " res = self.c.messages.create(\n", " model=self.model, messages=msgs, max_tokens=maxtok, system=sp, temperature=temp, **kwargs)\n", - " self._r(res, prefill)\n", - " if self.log is not None: self.log.append({\n", - " \"msgs\": msgs, \"maxtok\": maxtok, \"sp\": sp, \"temp\": temp, \"prefill\": prefill, \"stream\": stream, \"stop\": stop, **kwargs,\n", - " \"result\": res, \"use\": 
self.use, \"stop_reason\": self.stop_reason, \"stop_sequence\": self.stop_sequence\n", - " })\n", - " return self.result" + " return self._log(res, prefill, msgs, maxtok, sp, temp, stream=stream, stop=stop, **kwargs)" ] }, { @@ -1052,7 +1076,7 @@ "\n", "
\n", "\n", - "- id: `msg_01BLmTLhfTA4xjPdWw395Beg`\n", + "- id: `msg_01ADNqsrkyiEoMpqQy8WY8DC`\n", "- content: `[{'text': 'Hello! How can I assist you today?', 'type': 'text'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", @@ -1064,7 +1088,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01BLmTLhfTA4xjPdWw395Beg', content=[TextBlock(text='Hello! How can I assist you today?', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 8; Out: 12; Total: 20)" + "Message(id='msg_01ADNqsrkyiEoMpqQy8WY8DC', content=[TextBlock(text='Hello! How can I assist you today?', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 8; Out: 12; Total: 20)" ] }, "execution_count": null, @@ -1129,7 +1153,7 @@ "\n", "
\n", "\n", - "- id: `msg_0128Snr458wB82JKr1fCYQ2j`\n", + "- id: `msg_01RxVtMH3djiS3pRc8Do2SFA`\n", "- content: `[{'text': 'According to Douglas Adams, \"The answer to the ultimate question of life, the universe, and everything is 42.\"', 'type': 'text'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", @@ -1141,7 +1165,7 @@ "
" ], "text/plain": [ - "Message(id='msg_0128Snr458wB82JKr1fCYQ2j', content=[TextBlock(text='According to Douglas Adams, \"The answer to the ultimate question of life, the universe, and everything is 42.\"', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 24; Out: 23; Total: 47)" + "Message(id='msg_01RxVtMH3djiS3pRc8Do2SFA', content=[TextBlock(text='According to Douglas Adams, \"The answer to the ultimate question of life, the universe, and everything is 42.\"', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 24; Out: 23; Total: 47)" ] }, "execution_count": null, @@ -1260,7 +1284,7 @@ "\n", "
\n", "\n", - "- id: `msg_0158semfBqxcbwfb26HuT4dt`\n", + "- id: `msg_013P6ntb5vP2XGwUdDrAcxSP`\n", "- content: `[{'text': '1, 2, 3, 4, ', 'type': 'text'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", @@ -1272,7 +1296,7 @@ "
" ], "text/plain": [ - "Message(id='msg_0158semfBqxcbwfb26HuT4dt', content=[TextBlock(text='1, 2, 3, 4, ', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='stop_sequence', stop_sequence='5', type='message', usage=In: 15; Out: 14; Total: 29)" + "Message(id='msg_013P6ntb5vP2XGwUdDrAcxSP', content=[TextBlock(text='1, 2, 3, 4, ', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='stop_sequence', stop_sequence='5', type='message', usage=In: 15; Out: 14; Total: 29)" ] }, "execution_count": null, @@ -1335,7 +1359,12 @@ " 'system': '',\n", " 'temperature': 0,\n", " 'stop_sequences': ['2', 'yellow'],\n", - " 'result': Message(id='msg_01X93po3iMD7VJjazQhPe4hb', content=[TextBlock(text='1, ', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='stop_sequence', stop_sequence='2', type='message', usage=In: 15; Out: 5; Total: 20),\n", + " 'maxtok': None,\n", + " 'sp': None,\n", + " 'temp': None,\n", + " 'stream': None,\n", + " 'stop': None,\n", + " 'result': Message(id='msg_01Mcd8Mxnw3zMwtUy1y5Q1gf', content=[TextBlock(text='1, ', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='stop_sequence', stop_sequence='2', type='message', usage=In: 15; Out: 5; Total: 20),\n", " 'use': In: 94; Out: 89; Total: 183,\n", " 'stop_reason': 'stop_sequence',\n", " 'stop_sequence': '2'}" @@ -1475,12 +1504,12 @@ { "data": { "text/markdown": [ - "ToolUseBlock(id='toolu_01PEuTFPHoKt3Evpi9EN7fRw', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')\n", + "ToolUseBlock(id='toolu_01KZ2VpnTQmG26UzfAotM98g', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')\n", "\n", "
\n", "\n", - "- id: `msg_01XGvtfvUhWiYQvfwwY5vjvc`\n", - "- content: `[{'id': 'toolu_01PEuTFPHoKt3Evpi9EN7fRw', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]`\n", + "- id: `msg_013KmnCBTXC1kJmefF9LfF3d`\n", + "- content: `[{'id': 'toolu_01KZ2VpnTQmG26UzfAotM98g', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", "- stop_reason: `tool_use`\n", @@ -1491,7 +1520,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01XGvtfvUhWiYQvfwwY5vjvc', content=[ToolUseBlock(id='toolu_01PEuTFPHoKt3Evpi9EN7fRw', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-haiku-20240307', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 493; Out: 53; Total: 546)" + "Message(id='msg_013KmnCBTXC1kJmefF9LfF3d', content=[ToolUseBlock(id='toolu_01KZ2VpnTQmG26UzfAotM98g', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-haiku-20240307', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 493; Out: 53; Total: 546)" ] }, "execution_count": null, @@ -1600,7 +1629,7 @@ "data": { "text/plain": [ "{'type': 'tool_result',\n", - " 'tool_use_id': 'toolu_01PEuTFPHoKt3Evpi9EN7fRw',\n", + " 'tool_use_id': 'toolu_01KZ2VpnTQmG26UzfAotM98g',\n", " 'content': '7063474'}" ] }, @@ -1661,10 +1690,10 @@ "data": { "text/plain": [ "[{'role': 'assistant',\n", - " 'content': [ToolUseBlock(id='toolu_01PEuTFPHoKt3Evpi9EN7fRw', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')]},\n", + " 'content': [ToolUseBlock(id='toolu_01KZ2VpnTQmG26UzfAotM98g', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')]},\n", " {'role': 'user',\n", " 'content': [{'type': 'tool_result',\n", - " 'tool_use_id': 'toolu_01PEuTFPHoKt3Evpi9EN7fRw',\n", + " 'tool_use_id': 'toolu_01KZ2VpnTQmG26UzfAotM98g',\n", " 'content': '7063474'}]}]" ] }, @@ -1859,25 +1888,45 @@ { "cell_type": "code", "execution_count": null, - "id": "bec85e37", + "id": "d0178ee1", + "metadata": {}, + "outputs": [], + "source": [ + "#| exports\n", + "@patch\n", + "def _post_pr(self:Chat, pr, prev_role):\n", + " if pr is None and prev_role == 'assistant':\n", + " if self.cont_pr is None:\n", + " raise ValueError(\"Prompt must be given after assistant completion, or use `self.cont_pr`.\")\n", + " pr = self.cont_pr # No user prompt, keep the chain\n", + " if pr: self.h.append(mk_msg(pr))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1b5c04e6", "metadata": {}, "outputs": [], "source": [ "#| exports\n", - "\n", "@patch\n", "def _append_pr(self:Chat,\n", " pr=None, # Prompt / message\n", " ):\n", - " prev_role = nested_idx(self.h, -1, 'role') if self.h else 'assistant' # First message should be 'user' if no history\n", - " if pr and prev_role == 'user':\n", - " self() # There's already a user request pending, so complete it\n", - " elif pr is None and prev_role == 'assistant':\n", - " if self.cont_pr is None:\n", - " raise ValueError(\"User prompt must be given after an assistant completion, or `self.cont_pr` must be specified.\")\n", - " pr = self.cont_pr # No user prompt, keep the `assistant,[user:cont_pr],assistant` chain\n", - " if pr: self.h.append(mk_msg(pr))\n", - "\n", + " prev_role = nested_idx(self.h, -1, 'role') if self.h else 'assistant' # First message should be 'user'\n", + " if pr and prev_role == 'user': self() # already user request pending\n", + " self._post_pr(pr, prev_role)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bec85e37", + "metadata": {}, + "outputs": [], + "source": [ + "#| exports\n", "@patch\n", "def __call__(self:Chat,\n", " pr=None, # Prompt / message\n", @@ -1912,23 +1961,23 @@ { "data": { "text/markdown": [ - "Your name is Jeremy, as you mentioned in your previous message.\n", + "Your name is Jeremy.\n", "\n", "
\n", "\n", - "- id: `msg_01JbjxLPmWQr6ebAwHBLjkUS`\n", - "- content: `[{'text': 'Your name is Jeremy, as you mentioned in your previous message.', 'type': 'text'}]`\n", + "- id: `msg_01DJHYAMYuC7TQSdMCmeQiaq`\n", + "- content: `[{'text': 'Your name is Jeremy.', 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 64, 'output_tokens': 16, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 235, 'output_tokens': 8, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01JbjxLPmWQr6ebAwHBLjkUS', content=[TextBlock(text='Your name is Jeremy, as you mentioned in your previous message.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 64; Out: 16; Total: 80)" + "Message(id='msg_01DJHYAMYuC7TQSdMCmeQiaq', content=[TextBlock(text='Your name is Jeremy.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 235; Out: 8; Total: 243)" ] }, "execution_count": null, @@ -1981,19 +2030,19 @@ "\n", "
\n", "\n", - "- id: `msg_0131H5RhZ6vLvGioygWd6Sze`\n", + "- id: `msg_01PLpsyGYqjVTMnvtGqzh1qD`\n", "- content: `[{'text': \"According to Douglas Adams, the meaning of life is 42. More seriously, there's no universally agreed upon answer. Common philosophical perspectives include:\\n\\n1. Finding personal fulfillment\\n2. Serving others\\n3. Pursuing happiness\\n4. Creating meaning through our choices\\n5. Experiencing and appreciating existence\\n\\nUltimately, many believe each individual must determine their own life's meaning.\", 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 100, 'output_tokens': 82, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 263, 'output_tokens': 82, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_0131H5RhZ6vLvGioygWd6Sze', content=[TextBlock(text=\"According to Douglas Adams, the meaning of life is 42. More seriously, there's no universally agreed upon answer. Common philosophical perspectives include:\\n\\n1. Finding personal fulfillment\\n2. Serving others\\n3. Pursuing happiness\\n4. Creating meaning through our choices\\n5. Experiencing and appreciating existence\\n\\nUltimately, many believe each individual must determine their own life's meaning.\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 100; Out: 82; Total: 182)" + "Message(id='msg_01PLpsyGYqjVTMnvtGqzh1qD', content=[TextBlock(text=\"According to Douglas Adams, the meaning of life is 42. More seriously, there's no universally agreed upon answer. Common philosophical perspectives include:\\n\\n1. Finding personal fulfillment\\n2. Serving others\\n3. Pursuing happiness\\n4. Creating meaning through our choices\\n5. Experiencing and appreciating existence\\n\\nUltimately, many believe each individual must determine their own life's meaning.\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 263; Out: 82; Total: 345)" ] }, "execution_count": null, @@ -2023,7 +2072,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Error: User prompt must be given after an assistant completion, or `self.cont_pr` must be specified.\n" + "Error: Prompt must be given after assistant completion, or use `self.cont_pr`.\n" ] } ], @@ -2049,36 +2098,36 @@ { "data": { "text/markdown": [ - "Continuing on the topic of life's meaning:\n", + "Continuing on the topic of the meaning of life:\n", "\n", "6. Achieving self-actualization\n", "7. Leaving a positive legacy\n", "8. Connecting with others and forming relationships\n", - "9. Exploring and understanding the universe\n", - "10. Evolving as a species\n", - "11. Overcoming challenges and growing\n", - "12. Finding balance between various aspects of life\n", - "13. Expressing creativity and individuality\n", - "14. Pursuing knowledge and wisdom\n", - "15. Living in harmony with nature\n", + "9. Pursuing knowledge and understanding\n", + "10. Embracing spiritual or religious beliefs\n", + "11. Overcoming challenges and growing as a person\n", + "12. Contributing to the advancement of humanity\n", + "13. Finding balance and harmony in life\n", + "14. Expressing creativity and individuality\n", + "15. Experiencing love in its various forms\n", "\n", - "These perspectives often overlap and can be combined in various ways. Some argue that the absence of an inherent meaning allows for the freedom to create our own purpose.\n", + "These perspectives often overlap and can be combined in various ways. The search for meaning itself is considered by some to be an essential part of the human experience.\n", "\n", "
\n", "\n", - "- id: `msg_016qojmGqVSCp5AW9pmduwNv`\n", - "- content: `[{'text': \"Continuing on the topic of life's meaning:\\n\\n6. Achieving self-actualization\\n7. Leaving a positive legacy\\n8. Connecting with others and forming relationships\\n9. Exploring and understanding the universe\\n10. Evolving as a species\\n11. Overcoming challenges and growing\\n12. Finding balance between various aspects of life\\n13. Expressing creativity and individuality\\n14. Pursuing knowledge and wisdom\\n15. Living in harmony with nature\\n\\nThese perspectives often overlap and can be combined in various ways. Some argue that the absence of an inherent meaning allows for the freedom to create our own purpose.\", 'type': 'text'}]`\n", + "- id: `msg_0195oiRnvGTs64CCiQrESVn4`\n", + "- content: `[{'text': 'Continuing on the topic of the meaning of life:\\n\\n6. Achieving self-actualization\\n7. Leaving a positive legacy\\n8. Connecting with others and forming relationships\\n9. Pursuing knowledge and understanding\\n10. Embracing spiritual or religious beliefs\\n11. Overcoming challenges and growing as a person\\n12. Contributing to the advancement of humanity\\n13. Finding balance and harmony in life\\n14. Expressing creativity and individuality\\n15. Experiencing love in its various forms\\n\\nThese perspectives often overlap and can be combined in various ways. The search for meaning itself is considered by some to be an essential part of the human experience.', 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 188, 'output_tokens': 134, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 351, 'output_tokens': 139, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_016qojmGqVSCp5AW9pmduwNv', content=[TextBlock(text=\"Continuing on the topic of life's meaning:\\n\\n6. Achieving self-actualization\\n7. Leaving a positive legacy\\n8. Connecting with others and forming relationships\\n9. Exploring and understanding the universe\\n10. Evolving as a species\\n11. Overcoming challenges and growing\\n12. Finding balance between various aspects of life\\n13. Expressing creativity and individuality\\n14. Pursuing knowledge and wisdom\\n15. Living in harmony with nature\\n\\nThese perspectives often overlap and can be combined in various ways. Some argue that the absence of an inherent meaning allows for the freedom to create our own purpose.\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 188; Out: 134; Total: 322)" + "Message(id='msg_0195oiRnvGTs64CCiQrESVn4', content=[TextBlock(text='Continuing on the topic of the meaning of life:\\n\\n6. Achieving self-actualization\\n7. Leaving a positive legacy\\n8. Connecting with others and forming relationships\\n9. Pursuing knowledge and understanding\\n10. Embracing spiritual or religious beliefs\\n11. Overcoming challenges and growing as a person\\n12. Contributing to the advancement of humanity\\n13. Finding balance and harmony in life\\n14. Expressing creativity and individuality\\n15. Experiencing love in its various forms\\n\\nThese perspectives often overlap and can be combined in various ways. The search for meaning itself is considered by some to be an essential part of the human experience.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 351; Out: 139; Total: 490)" ] }, "execution_count": null, @@ -2112,7 +2161,7 @@ " 'content': [TextBlock(text=\"According to Douglas Adams, the meaning of life is 42. More seriously, there's no universally agreed upon answer. Common philosophical perspectives include:\\n\\n1. Finding personal fulfillment\\n2. Serving others\\n3. Pursuing happiness\\n4. Creating meaning through our choices\\n5. Experiencing and appreciating existence\\n\\nUltimately, many believe each individual must determine their own life's meaning.\", type='text')]},\n", " {'role': 'user', 'content': 'keep going...'},\n", " {'role': 'assistant',\n", - " 'content': [TextBlock(text=\"Continuing on the topic of life's meaning:\\n\\n6. Achieving self-actualization\\n7. Leaving a positive legacy\\n8. Connecting with others and forming relationships\\n9. Exploring and understanding the universe\\n10. Evolving as a species\\n11. Overcoming challenges and growing\\n12. Finding balance between various aspects of life\\n13. Expressing creativity and individuality\\n14. Pursuing knowledge and wisdom\\n15. Living in harmony with nature\\n\\nThese perspectives often overlap and can be combined in various ways. Some argue that the absence of an inherent meaning allows for the freedom to create our own purpose.\", type='text')]}]" + " 'content': [TextBlock(text='Continuing on the topic of the meaning of life:\\n\\n6. Achieving self-actualization\\n7. Leaving a positive legacy\\n8. Connecting with others and forming relationships\\n9. Pursuing knowledge and understanding\\n10. Embracing spiritual or religious beliefs\\n11. Overcoming challenges and growing as a person\\n12. Contributing to the advancement of humanity\\n13. Finding balance and harmony in life\\n14. 
Expressing creativity and individuality\\n15. Experiencing love in its various forms\\n\\nThese perspectives often overlap and can be combined in various ways. The search for meaning itself is considered by some to be an essential part of the human experience.', type='text')]}]" ] }, "execution_count": null, @@ -2169,10 +2218,9 @@ "4. Experiencing love and relationships\n", "5. Creating or appreciating art\n", "6. Achieving goals\n", - "7. Living according to one's values\n", - "8. Spiritual or religious devotion\n", + "7. Living according to religious or spiritual beliefs\n", "\n", - "Ultimately, many believe each individual must define their own meaning." + "Ultimately, many philosophers argue that each individual must determine their own meaning." ] } ], diff --git a/02_async.ipynb b/02_async.ipynb index 1f42533..94d1535 100644 --- a/02_async.ipynb +++ b/02_async.ipynb @@ -39,17 +39,10 @@ "try: from IPython import display\n", "except: display=None\n", "\n", - "from anthropic import Anthropic, AnthropicBedrock, AnthropicVertex, AsyncAnthropic\n", - "from anthropic.types import Usage, TextBlock, Message, ToolUseBlock\n", - "from anthropic.resources import messages\n", - "\n", - "import toolslm\n", - "from toolslm.funccall import *\n", - "\n", - "from fastcore import imghdr\n", + "from anthropic import AsyncAnthropic\n", + "from toolslm.funccall import get_schema\n", "from fastcore.meta import delegates\n", "from fastcore.utils import *\n", - "\n", "from claudette.core import *" ] }, @@ -92,12 +85,12 @@ { "data": { "text/markdown": [ - "Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything in particular you'd like to discuss or any questions you have?\n", + "Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\n", "\n", "
\n", "\n", - "- id: `msg_014FHSMB3ve62Nb3JhNox8ks`\n", - "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything in particular you'd like to discuss or any questions you have?\", 'type': 'text'}]`\n", + "- id: `msg_01Tk1KAUqBU9LXipxfNtdLct`\n", + "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", @@ -108,7 +101,7 @@ "
" ], "text/plain": [ - "Message(id='msg_014FHSMB3ve62Nb3JhNox8ks', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything in particular you'd like to discuss or any questions you have?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 36; Total: 46)" + "Message(id='msg_01Tk1KAUqBU9LXipxfNtdLct', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 36; Total: 46)" ] }, "execution_count": null, @@ -180,13 +173,9 @@ "@patch\n", "async def _stream(self:AsyncClient, msgs:list, prefill='', **kwargs):\n", " async with self.c.messages.stream(model=self.model, messages=mk_msgs(msgs), **kwargs) as s:\n", - " if prefill: yield(prefill)\n", + " if prefill: yield prefill\n", " async for o in s.text_stream: yield o\n", - " self._r(await s.get_final_message(), prefill)\n", - " if self.log is not None: self.log.append({\n", - " \"msgs\": msgs, \"prefill\": prefill, **kwargs,\n", - " \"result\": self.result, \"use\": self.use, \"stop_reason\": self.stop_reason, \"stop_sequence\": self.stop_sequence\n", - " })" + " self._log(await s.get_final_message(), prefill, msgs, kwargs)" ] }, { @@ -198,7 +187,7 @@ "source": [ "#| exports\n", "@patch\n", - "@delegates(messages.Messages.create)\n", + "@delegates(Client)\n", "async def __call__(self:AsyncClient,\n", " msgs:list, # List of messages in the dialog\n", " sp='', # The system prompt\n", @@ -208,22 +197,12 @@ " stream:bool=False, # Stream response?\n", " stop=None, # Stop sequence\n", " **kwargs):\n", - " \"Make a call to Claude.\"\n", - " pref = [prefill.strip()] if prefill else []\n", - " if not isinstance(msgs,list): msgs = [msgs]\n", - " if stop is not None:\n", - " if not isinstance(stop, (list)): stop = [stop]\n", - " kwargs[\"stop_sequences\"] = stop\n", - " msgs = mk_msgs(msgs+pref)\n", + " \"Make an async call to Claude.\"\n", + " msgs = self._precall(msgs, prefill, stop, kwargs)\n", " if stream: return self._stream(msgs, prefill=prefill, max_tokens=maxtok, system=sp, temperature=temp, **kwargs)\n", " res = await self.c.messages.create(\n", " model=self.model, messages=msgs, max_tokens=maxtok, system=sp, temperature=temp, **kwargs)\n", - " self._r(res, prefill)\n", - " if self.log is not None: self.log.append({\n", - " \"msgs\": msgs, \"maxtok\": maxtok, \"sp\": sp, \"temp\": temp, \"prefill\": prefill, \"stream\": stream, \"stop\": stop, **kwargs,\n", - " \"result\": res, \"use\": self.use, \"stop_reason\": self.stop_reason, \"stop_sequence\": self.stop_sequence\n", - " })\n", - " return self.result" + " return self._log(res, prefill, msgs, maxtok, sp, temp, stream=stream, stop=stop, **kwargs)" ] }, { @@ -261,7 +240,7 @@ "\n", "
\n", "\n", - "- id: `msg_01MHi9XJJsATikSrieuGj1Vi`\n", + "- id: `msg_01BHN4QUDrDkdxp88au9bvbq`\n", "- content: `[{'text': 'Hello! How can I assist you today? Feel free to ask any questions or let me know if you need help with anything.', 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", @@ -273,7 +252,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01MHi9XJJsATikSrieuGj1Vi', content=[TextBlock(text='Hello! How can I assist you today? Feel free to ask any questions or let me know if you need help with anything.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 8; Out: 29; Total: 37)" + "Message(id='msg_01BHN4QUDrDkdxp88au9bvbq', content=[TextBlock(text='Hello! How can I assist you today? Feel free to ask any questions or let me know if you need help with anything.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 8; Out: 29; Total: 37)" ] }, "execution_count": null, @@ -320,7 +299,7 @@ "\n", "
\n", "\n", - "- id: `msg_01MsAKN59drmNUKuMxkkGijs`\n", + "- id: `msg_01Vam3xyyfx1o24eJg8F6Eru`\n", "- content: `[{'text': \"According to Douglas Adams, the meaning of life is 42. More seriously, there's no universally agreed upon meaning of life. Many philosophers and religions have proposed different answers, but it remains an open question that individuals must grapple with for themselves.\", 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", @@ -332,7 +311,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01MsAKN59drmNUKuMxkkGijs', content=[TextBlock(text=\"According to Douglas Adams, the meaning of life is 42. More seriously, there's no universally agreed upon meaning of life. Many philosophers and religions have proposed different answers, but it remains an open question that individuals must grapple with for themselves.\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 24; Out: 51; Total: 75)" + "Message(id='msg_01Vam3xyyfx1o24eJg8F6Eru', content=[TextBlock(text=\"According to Douglas Adams, the meaning of life is 42. More seriously, there's no universally agreed upon meaning of life. Many philosophers and religions have proposed different answers, but it remains an open question that individuals must grapple with for themselves.\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 24; Out: 51; Total: 75)" ] }, "execution_count": null, @@ -512,16 +491,14 @@ "outputs": [], "source": [ "#| exports\n", + "@delegates()\n", "class AsyncChat(Chat):\n", " def __init__(self,\n", " model:Optional[str]=None, # Model to use (leave empty if passing `cli`)\n", " cli:Optional[Client]=None, # Client to use (leave empty if passing `model`)\n", - " sp='', # Optional system prompt\n", - " tools:Optional[list]=None, # List of tools to make available to Claude\n", - " cont_pr:Optional[str]=None, # User prompt to continue an assistant response\n", - " tool_choice:Optional[dict]=None): # Optionally force use of some tool\n", + " **kwargs):\n", " \"Anthropic async chat client.\"\n", - " super().__init__(model, cli, sp, tools, cont_pr=cont_pr, tool_choice=tool_choice)\n", + " super().__init__(model, cli, **kwargs)\n", " if not cli: self.c = AsyncClient(model)" ] }, @@ -571,17 +548,10 @@ "source": [ "#| exports\n", "@patch\n", - "async def _append_pr(self:AsyncChat,\n", - " pr=None, # Prompt / message\n", - "):\n", + "async def _append_pr(self:AsyncChat, pr=None):\n", " prev_role = nested_idx(self.h, -1, 'role') if self.h else 'assistant' # First message should be 'user' if no history\n", - " if pr and prev_role == 'user':\n", - " await self() # There's already a user request pending, so complete it\n", - " elif pr is None and prev_role == 'assistant':\n", - " if self.cont_pr is None:\n", - " raise ValueError(\"User prompt must be given after an assistant completion, or `self.cont_pr` must be specified.\")\n", - " pr = self.cont_pr # No user prompt, keep the `assistant,[user:cont_pr],assistant` chain\n", - " if pr: self.h.append(mk_msg(pr))" + " if pr and prev_role == 'user': await self()\n", + " self._post_pr(pr, prev_role)" ] }, { @@ -617,23 +587,23 @@ { "data": { "text/markdown": [ - "Your name is Jeremy, as you've stated twice now.\n", + "Your name is Jeremy, as you mentioned in your previous message.\n", "\n", "
\n", "\n", - "- id: `msg_01Bbiu5wTJRK3X4e6t9R8vTQ`\n", - "- content: `[{'text': \"Your name is Jeremy, as you've stated twice now.\", 'type': 'text'}]`\n", + "- id: `msg_01RysDG9ppAP2Zf5e82BBSMT`\n", + "- content: `[{'text': 'Your name is Jeremy, as you mentioned in your previous message.', 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 111, 'output_tokens': 15, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 64, 'output_tokens': 16, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01Bbiu5wTJRK3X4e6t9R8vTQ', content=[TextBlock(text=\"Your name is Jeremy, as you've stated twice now.\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 111; Out: 15; Total: 126)" + "Message(id='msg_01RysDG9ppAP2Zf5e82BBSMT', content=[TextBlock(text='Your name is Jeremy, as you mentioned in your previous message.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 64; Out: 16; Total: 80)" ] }, "execution_count": null, @@ -667,19 +637,19 @@ "\n", "
\n", "\n", - "- id: `msg_01GFxFixQD5G9sMmjRNXT3q4`\n", + "- id: `msg_01VPWUQn5Do1Kst8RYUDQvCu`\n", "- content: `[{'text': \"According to Douglas Adams, the meaning of life is 42. More seriously, there's no universally agreed upon answer. Common philosophical perspectives include:\\n\\n1. Finding personal fulfillment\\n2. Serving others\\n3. Pursuing happiness\\n4. Creating meaning through our choices\\n5. Experiencing and appreciating existence\\n\\nUltimately, many believe each individual must determine their own life's meaning.\", 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 146, 'output_tokens': 82, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 100, 'output_tokens': 82, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01GFxFixQD5G9sMmjRNXT3q4', content=[TextBlock(text=\"According to Douglas Adams, the meaning of life is 42. More seriously, there's no universally agreed upon answer. Common philosophical perspectives include:\\n\\n1. Finding personal fulfillment\\n2. Serving others\\n3. Pursuing happiness\\n4. Creating meaning through our choices\\n5. Experiencing and appreciating existence\\n\\nUltimately, many believe each individual must determine their own life's meaning.\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 146; Out: 82; Total: 228)" + "Message(id='msg_01VPWUQn5Do1Kst8RYUDQvCu', content=[TextBlock(text=\"According to Douglas Adams, the meaning of life is 42. More seriously, there's no universally agreed upon answer. Common philosophical perspectives include:\\n\\n1. Finding personal fulfillment\\n2. Serving others\\n3. Pursuing happiness\\n4. Creating meaning through our choices\\n5. Experiencing and appreciating existence\\n\\nUltimately, many believe each individual must determine their own life's meaning.\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 100; Out: 82; Total: 182)" ] }, "execution_count": null, @@ -732,8 +702,8 @@ "\n", "
\n", "\n", - "- id: `msg_01ViaZgiBRaqqT3uvGdMtvVT`\n", - "- content: `[{'text': 'To answer this question, I can use the \"sums\" function to add these two numbers together. Let me do that for you.', 'type': 'text'}, {'id': 'toolu_01JXmhmbfZjoH9FswYBXcoYa', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]`\n", + "- id: `msg_015z1rffSWFxvj7rSpzc43ZE`\n", + "- content: `[{'text': 'To answer this question, I can use the \"sums\" function to add these two numbers together. Let me do that for you.', 'type': 'text'}, {'id': 'toolu_01SNKhtfnDQBC4RGY4mUCq1v', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", "- stop_reason: `tool_use`\n", @@ -744,7 +714,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01ViaZgiBRaqqT3uvGdMtvVT', content=[TextBlock(text='To answer this question, I can use the \"sums\" function to add these two numbers together. Let me do that for you.', type='text'), ToolUseBlock(id='toolu_01JXmhmbfZjoH9FswYBXcoYa', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 428; Out: 101; Total: 529)" + "Message(id='msg_015z1rffSWFxvj7rSpzc43ZE', content=[TextBlock(text='To answer this question, I can use the \"sums\" function to add these two numbers together. Let me do that for you.', type='text'), ToolUseBlock(id='toolu_01SNKhtfnDQBC4RGY4mUCq1v', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 428; Out: 101; Total: 529)" ] }, "execution_count": null, @@ -772,7 +742,7 @@ "\n", "
\n", "\n", - "- id: `msg_01AdZn4SvbKgCZieHqWSyecL`\n", + "- id: `msg_018KAsE2YGiXWjUJkLPrXpb2`\n", "- content: `[{'text': 'The sum of 604542 and 6458932 is 7063474.', 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", @@ -784,7 +754,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01AdZn4SvbKgCZieHqWSyecL', content=[TextBlock(text='The sum of 604542 and 6458932 is 7063474.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 543; Out: 23; Total: 566)" + "Message(id='msg_018KAsE2YGiXWjUJkLPrXpb2', content=[TextBlock(text='The sum of 604542 and 6458932 is 7063474.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 543; Out: 23; Total: 566)" ] }, "execution_count": null, @@ -816,23 +786,23 @@ { "data": { "text/markdown": [ - "The flowers in this image are purple. They appear to be small, daisy-like flowers, possibly asters or some type of purple wildflower, blooming in the background behind the adorable puppy in the foreground.\n", + "The flowers in this image are purple. They appear to be small, daisy-like flowers, possibly asters or some type of purple daisy, blooming in the background behind the adorable puppy in the foreground.\n", "\n", "
<details>\n",
     "\n",
-     "- id: `msg_01VeDwyX7fn3bDKZ8ES8qgL1`\n",
-     "- content: `[{'text': 'The flowers in this image are purple. They appear to be small, daisy-like flowers, possibly asters or some type of purple wildflower, blooming in the background behind the adorable puppy in the foreground.', 'type': 'text'}]`\n",
+     "- id: `msg_017qgZggLjUY915mWbWCkb9X`\n",
+     "- content: `[{'text': 'The flowers in this image are purple. They appear to be small, daisy-like flowers, possibly asters or some type of purple daisy, blooming in the background behind the adorable puppy in the foreground.', 'type': 'text'}]`\n",
      "- model: `claude-3-5-sonnet-20240620`\n",
      "- role: `assistant`\n",
      "- stop_reason: `end_turn`\n",
      "- stop_sequence: `None`\n",
      "- type: `message`\n",
-     "- usage: `{'input_tokens': 110, 'output_tokens': 51, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n",
+     "- usage: `{'input_tokens': 110, 'output_tokens': 50, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n",
      "\n",
      "</details>"
     ],
     "text/plain": [
-     "Message(id='msg_01VeDwyX7fn3bDKZ8ES8qgL1', content=[TextBlock(text='The flowers in this image are purple. They appear to be small, daisy-like flowers, possibly asters or some type of purple wildflower, blooming in the background behind the adorable puppy in the foreground.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 110; Out: 51; Total: 161)"
+     "Message(id='msg_017qgZggLjUY915mWbWCkb9X', content=[TextBlock(text='The flowers in this image are purple. They appear to be small, daisy-like flowers, possibly asters or some type of purple daisy, blooming in the background behind the adorable puppy in the foreground.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 110; Out: 50; Total: 160)"
     ]
    },
    "execution_count": null,
diff --git a/claudette/_modidx.py b/claudette/_modidx.py
index 60bb853..81c5441 100644
--- a/claudette/_modidx.py
+++ b/claudette/_modidx.py
@@ -17,11 +17,14 @@
                    'claudette.core.Chat.__call__': ('core.html#chat.__call__', 'claudette/core.py'),
                    'claudette.core.Chat.__init__': ('core.html#chat.__init__', 'claudette/core.py'),
                    'claudette.core.Chat._append_pr': ('core.html#chat._append_pr', 'claudette/core.py'),
+                   'claudette.core.Chat._post_pr': ('core.html#chat._post_pr', 'claudette/core.py'),
                    'claudette.core.Chat._stream': ('core.html#chat._stream', 'claudette/core.py'),
                    'claudette.core.Chat.use': ('core.html#chat.use', 'claudette/core.py'),
                    'claudette.core.Client': ('core.html#client', 'claudette/core.py'),
                    'claudette.core.Client.__call__': ('core.html#client.__call__', 'claudette/core.py'),
                    'claudette.core.Client.__init__': ('core.html#client.__init__', 'claudette/core.py'),
+                   'claudette.core.Client._log': ('core.html#client._log', 'claudette/core.py'),
+                   'claudette.core.Client._precall': ('core.html#client._precall', 'claudette/core.py'),
                    'claudette.core.Client._r': ('core.html#client._r', 'claudette/core.py'),
                    'claudette.core.Client._stream': ('core.html#client._stream', 'claudette/core.py'),
                    'claudette.core.Message._repr_markdown_': ('core.html#message._repr_markdown_', 'claudette/core.py'),
diff --git a/claudette/asink.py b/claudette/asink.py
index 53440c3..b3089f6 100644
--- a/claudette/asink.py
+++ b/claudette/asink.py
@@ -9,17 +9,10 @@
 try: from IPython import display
 except: display=None
 
-from anthropic import Anthropic, AnthropicBedrock, AnthropicVertex, AsyncAnthropic
-from anthropic.types import Usage, TextBlock, Message, ToolUseBlock
-from anthropic.resources import messages
-
-import toolslm
-from toolslm.funccall import *
-
-from fastcore import imghdr
+from anthropic import AsyncAnthropic
+from toolslm.funccall import get_schema
 from fastcore.meta import delegates
 from fastcore.utils import *
-
 from .core import *
 
 # %% ../02_async.ipynb
@@ -33,17 +26,13 @@ def __init__(self, model, cli=None, log=False):
 @patch
 async def _stream(self:AsyncClient, msgs:list, prefill='', **kwargs):
     async with self.c.messages.stream(model=self.model, messages=mk_msgs(msgs), **kwargs) as s:
-        if prefill: yield(prefill)
+        if prefill: yield prefill
         async for o in s.text_stream: yield o
-        self._r(await s.get_final_message(), prefill)
-        if self.log is not None: self.log.append({
-            "msgs": msgs, "prefill": prefill, **kwargs,
-            "result": self.result, "use": self.use, "stop_reason": self.stop_reason, "stop_sequence": self.stop_sequence
-        })
+        self._log(await s.get_final_message(), prefill, msgs, kwargs)
 
 # %% ../02_async.ipynb
 @patch
-@delegates(messages.Messages.create)
+@delegates(Client)
 async def __call__(self:AsyncClient,
             msgs:list, # List of messages in the dialog
             sp='', # The system prompt
             temp=0, # Temperature
             maxtok=4096, # Maximum tokens
             prefill='', # Optional prefill to pass to Claude as start of its response
             stream:bool=False, # Stream response?
             stop=None, # Stop sequence
             **kwargs):
-    "Make a call to Claude."
-    pref = [prefill.strip()] if prefill else []
-    if not isinstance(msgs,list): msgs = [msgs]
-    if stop is not None:
-        if not isinstance(stop, (list)): stop = [stop]
-        kwargs["stop_sequences"] = stop
-    msgs = mk_msgs(msgs+pref)
+    "Make an async call to Claude."
+    msgs = self._precall(msgs, prefill, stop, kwargs)
     if stream: return self._stream(msgs, prefill=prefill, max_tokens=maxtok, system=sp, temperature=temp, **kwargs)
     res = await self.c.messages.create(
         model=self.model, messages=msgs, max_tokens=maxtok, system=sp, temperature=temp, **kwargs)
-    self._r(res, prefill)
-    if self.log is not None: self.log.append({
-        "msgs": msgs, "maxtok": maxtok, "sp": sp, "temp": temp, "prefill": prefill, "stream": stream, "stop": stop, **kwargs,
-        "result": res, "use": self.use, "stop_reason": self.stop_reason, "stop_sequence": self.stop_sequence
-    })
-    return self.result
+    return self._log(res, prefill, msgs, maxtok, sp, temp, stream=stream, stop=stop, **kwargs)
 
 # %% ../02_async.ipynb
+@delegates()
 class AsyncChat(Chat):
     def __init__(self,
                  model:Optional[str]=None, # Model to use (leave empty if passing `cli`)
                  cli:Optional[Client]=None, # Client to use (leave empty if passing `model`)
-                 sp='', # Optional system prompt
-                 tools:Optional[list]=None, # List of tools to make available to Claude
-                 cont_pr:Optional[str]=None, # User prompt to continue an assistant response
-                 tool_choice:Optional[dict]=None): # Optionally force use of some tool
+                 **kwargs):
         "Anthropic async chat client."
-        super().__init__(model, cli, sp, tools, cont_pr=cont_pr, tool_choice=tool_choice)
+        super().__init__(model, cli, **kwargs)
         if not cli: self.c = AsyncClient(model)
 
 # %% ../02_async.ipynb
@@ -91,14 +68,7 @@ async def _stream(self:AsyncChat, res):
 
 # %% ../02_async.ipynb
 @patch
-async def _append_pr(self:AsyncChat,
-                     pr=None, # Prompt / message
-):
+async def _append_pr(self:AsyncChat, pr=None):
     prev_role = nested_idx(self.h, -1, 'role') if self.h else 'assistant' # First message should be 'user' if no history
-    if pr and prev_role == 'user':
-        await self() # There's already a user request pending, so complete it
-    elif pr is None and prev_role == 'assistant':
-        if self.cont_pr is None:
-            raise ValueError("User prompt must be given after an assistant completion, or `self.cont_pr` must be specified.")
-        pr = self.cont_pr # No user prompt, keep the `assistant,[user:cont_pr],assistant` chain
-    if pr: self.h.append(mk_msg(pr))
+    if pr and prev_role == 'user': await self()
+    self._post_pr(pr, prev_role)
diff --git a/claudette/core.py b/claudette/core.py
index 31466d8..4fcb3f0 100644
--- a/claudette/core.py
+++ b/claudette/core.py
@@ -101,17 +101,35 @@ def _r(self:Client, r:Message, prefill=''):
     self.stop_sequence = r.stop_sequence
     return r
 
+# %% ../00_core.ipynb
+@patch
+def _log(self:Client, final, prefill, msgs, maxtok=None, sp=None, temp=None, stream=None, stop=None, **kwargs):
+    self._r(final, prefill)
+    if self.log is not None: self.log.append({
+        "msgs": msgs, "prefill": prefill, **kwargs,
+        "msgs": msgs, "prefill": prefill, "maxtok": maxtok, "sp": sp, "temp": temp, "stream": stream, "stop": stop, **kwargs,
+        "result": self.result, "use": self.use, "stop_reason": self.stop_reason, "stop_sequence": self.stop_sequence
+    })
+    return self.result
+
 # %% ../00_core.ipynb
 @patch
 def _stream(self:Client, msgs:list, prefill='', **kwargs):
     with self.c.messages.stream(model=self.model, messages=mk_msgs(msgs), **kwargs) as s:
         if prefill: yield(prefill)
         yield from s.text_stream
-        self._r(s.get_final_message(), prefill)
-        if self.log is not None: self.log.append({
-            "msgs": msgs, "prefill": prefill, **kwargs,
-            "result": self.result, "use": self.use, "stop_reason": self.stop_reason, "stop_sequence": self.stop_sequence
-        })
+        self._log(s.get_final_message(), prefill, msgs, **kwargs)
+
+# %% ../00_core.ipynb
+@patch
+def _precall(self:Client, msgs, prefill, stop, kwargs):
+    pref = [prefill.strip()] if prefill else []
+    if not isinstance(msgs,list): msgs = [msgs]
+    if stop is not None:
+        if not isinstance(stop, (list)): stop = [stop]
+        kwargs["stop_sequences"] = stop
+    msgs = mk_msgs(msgs+pref)
+    return msgs
 
 # %% ../00_core.ipynb
 @patch
@@ -126,21 +144,11 @@ def __call__(self:Client,
              stop=None, # Stop sequence
              **kwargs):
     "Make a call to Claude."
-    pref = [prefill.strip()] if prefill else []
-    if not isinstance(msgs,list): msgs = [msgs]
-    if stop is not None:
-        if not isinstance(stop, (list)): stop = [stop]
-        kwargs["stop_sequences"] = stop
-    msgs = mk_msgs(msgs+pref)
+    msgs = self._precall(msgs, prefill, stop, kwargs)
     if stream: return self._stream(msgs, prefill=prefill, max_tokens=maxtok, system=sp, temperature=temp, **kwargs)
     res = self.c.messages.create(
         model=self.model, messages=msgs, max_tokens=maxtok, system=sp, temperature=temp, **kwargs)
-    self._r(res, prefill)
-    if self.log is not None: self.log.append({
-        "msgs": msgs, "maxtok": maxtok, "sp": sp, "temp": temp, "prefill": prefill, "stream": stream, "stop": stop, **kwargs,
-        "result": res, "use": self.use, "stop_reason": self.stop_reason, "stop_sequence": self.stop_sequence
-    })
-    return self.result
+    return self._log(res, prefill, msgs, maxtok, sp, temp, stream=stream, stop=stop, **kwargs)
 
 # %% ../00_core.ipynb
 def mk_tool_choice(choose:Union[str,bool,None])->dict:
@@ -206,20 +214,25 @@ def _stream(self:Chat, res):
     yield from res
     self.h += mk_toolres(self.c.result, ns=self.tools, obj=self)
 
+# %% ../00_core.ipynb
+@patch
+def _post_pr(self:Chat, pr, prev_role):
+    if pr is None and prev_role == 'assistant':
+        if self.cont_pr is None:
+            raise ValueError("Prompt must be given after assistant completion, or use `self.cont_pr`.")
+        pr = self.cont_pr # No user prompt, keep the chain
+    if pr: self.h.append(mk_msg(pr))
+
 # %% ../00_core.ipynb
 @patch
 def _append_pr(self:Chat,
                pr=None, # Prompt / message
               ):
-    prev_role = nested_idx(self.h, -1, 'role') if self.h else 'assistant' # First message should be 'user' if no history
-    if pr and prev_role == 'user':
-        self() # There's already a user request pending, so complete it
-    elif pr is None and prev_role == 'assistant':
-        if self.cont_pr is None:
-            raise ValueError("User prompt must be given after an assistant completion, or `self.cont_pr` must be specified.")
-        pr = self.cont_pr # No user prompt, keep the `assistant,[user:cont_pr],assistant` chain
-    if pr: self.h.append(mk_msg(pr))
+    prev_role = nested_idx(self.h, -1, 'role') if self.h else 'assistant' # First message should be 'user'
+    if pr and prev_role == 'user': self() # already user request pending
+    self._post_pr(pr, prev_role)
 
+# %% ../00_core.ipynb
 @patch
 def __call__(self:Chat,
              pr=None, # Prompt / message