From 7c5d81a4de4c98d42d2484b13da90eb9c98f4f23 Mon Sep 17 00:00:00 2001 From: Jeremy Howard Date: Thu, 26 Sep 2024 12:20:13 +1000 Subject: [PATCH] fixes #32 --- 00_core.ipynb | 244 ++++++++++++++++++++++++++++++++++--------- claudette/_modidx.py | 1 + claudette/core.py | 61 +++++++---- 3 files changed, 237 insertions(+), 69 deletions(-) diff --git a/00_core.ipynb b/00_core.ipynb index 1ee5419..e57ce92 100644 --- a/00_core.ipynb +++ b/00_core.ipynb @@ -200,23 +200,23 @@ { "data": { "text/markdown": [ - "Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\n", + "Hello Jeremy! It's nice to meet you. How can I assist you today? Feel free to ask me any questions or let me know if there's anything you'd like to chat about.\n", "\n", "
\n", "\n", - "- id: `msg_01JPdBwtCQbE9KUcFv2Ar295`\n", - "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", 'type': 'text'}]`\n", + "- id: `msg_01E7gsNrfoxwETqowsoEwSoi`\n", + "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. How can I assist you today? Feel free to ask me any questions or let me know if there's anything you'd like to chat about.\", 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 10, 'output_tokens': 36}`\n", + "- usage: `{'input_tokens': 10, 'output_tokens': 42}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01JPdBwtCQbE9KUcFv2Ar295', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 36; Cache create: 0; Cache read: 0; Total: 46)" + "Message(id='msg_01E7gsNrfoxwETqowsoEwSoi', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Feel free to ask me any questions or let me know if there's anything you'd like to chat about.\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 42; Cache create: 0; Cache read: 0; Total: 52)" ] }, "execution_count": null, @@ -298,7 +298,7 @@ { "data": { "text/plain": [ - "TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", type='text')" + "TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Feel free to ask me any questions or let me know if there's anything you'd like to chat about.\", type='text')" ] }, "execution_count": null, @@ -342,7 +342,7 @@ { "data": { "text/plain": [ - "\"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\"" + "\"Hello Jeremy! It's nice to meet you. How can I assist you today? Feel free to ask me any questions or let me know if there's anything you'd like to chat about.\"" ] }, "execution_count": null, @@ -392,23 +392,23 @@ { "data": { "text/markdown": [ - "Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\n", + "Hello Jeremy! It's nice to meet you. 
How can I assist you today? Feel free to ask me any questions or let me know if there's anything you'd like to chat about.\n", "\n", "
\n", "\n", - "- id: `msg_01JPdBwtCQbE9KUcFv2Ar295`\n", - "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", 'type': 'text'}]`\n", + "- id: `msg_01E7gsNrfoxwETqowsoEwSoi`\n", + "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. How can I assist you today? Feel free to ask me any questions or let me know if there's anything you'd like to chat about.\", 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 10, 'output_tokens': 36}`\n", + "- usage: `{'input_tokens': 10, 'output_tokens': 42}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01JPdBwtCQbE9KUcFv2Ar295', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 36; Cache create: 0; Cache read: 0; Total: 46)" + "Message(id='msg_01E7gsNrfoxwETqowsoEwSoi', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Feel free to ask me any questions or let me know if there's anything you'd like to chat about.\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 42; Cache create: 0; Cache read: 0; Total: 52)" ] }, "execution_count": null, @@ -439,7 +439,7 @@ { "data": { "text/plain": [ - "In: 10; Out: 36; Cache create: 0; Cache read: 0; Total: 46" + "In: 10; Out: 42; Cache create: 0; Cache read: 0; Total: 52" ] }, "execution_count": null, @@ -610,7 +610,7 @@ { "data": { "text/plain": [ - "In: 20; Out: 72; Cache create: 0; Cache read: 0; Total: 92" + "In: 20; Out: 84; Cache create: 0; Cache read: 0; Total: 104" ] }, "execution_count": null, @@ -701,23 +701,23 @@ { "data": { "text/markdown": [ - "Hello Jeremy! It's nice to meet you. Is there anything I can help you with today?\n", + "Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to discuss or any questions you have?\n", "\n", "
\n", "\n", - "- id: `msg_01QY418WhBhWtextqTcgg7JE`\n", - "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. Is there anything I can help you with today?\", 'type': 'text'}]`\n", + "- id: `msg_01VUHCgp3BPNz2n6voNeyi84`\n", + "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to discuss or any questions you have?\", 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 10, 'output_tokens': 23}`\n", + "- usage: `{'input_tokens': 10, 'output_tokens': 35}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01QY418WhBhWtextqTcgg7JE', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. Is there anything I can help you with today?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 23; Cache create: 0; Cache read: 0; Total: 33)" + "Message(id='msg_01VUHCgp3BPNz2n6voNeyi84', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to discuss or any questions you have?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 35; Cache create: 0; Cache read: 0; Total: 45)" ] }, "execution_count": null, @@ -779,7 +779,7 @@ "text/plain": [ "[{'role': 'user', 'content': \"I'm Jeremy\"},\n", " {'role': 'assistant',\n", - " 'content': '[TextBlock(text=\"Hello Jeremy! It\\'s nice to meet you. Is there anything I can help you with today?\", type=\\'text\\')]'},\n", + " 'content': '[TextBlock(text=\"Hello Jeremy! It\\'s nice to meet you. How can I assist you today? Is there anything specific you\\'d like to discuss or any questions you have?\", type=\\'text\\')]'},\n", " {'role': 'user', 'content': 'I forgot my name. Can you remind me please?'}]" ] }, @@ -814,19 +814,19 @@ "\n", "
\n", "\n", - "- id: `msg_01Sz95VsrcBpE1kHEgDSvasL`\n", + "- id: `msg_01WmAnNhfhnFouAxKyaXBJqh`\n", "- content: `[{'text': 'Of course! You just told me that your name is Jeremy.', 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20240620`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 58, 'output_tokens': 16}`\n", + "- usage: `{'input_tokens': 70, 'output_tokens': 16}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01Sz95VsrcBpE1kHEgDSvasL', content=[TextBlock(text='Of course! You just told me that your name is Jeremy.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 58; Out: 16; Cache create: 0; Cache read: 0; Total: 74)" + "Message(id='msg_01WmAnNhfhnFouAxKyaXBJqh', content=[TextBlock(text='Of course! You just told me that your name is Jeremy.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 70; Out: 16; Cache create: 0; Cache read: 0; Total: 86)" ] }, "execution_count": null, @@ -932,7 +932,7 @@ { "data": { "text/plain": [ - "In: 10; Out: 23; Cache create: 0; Cache read: 0; Total: 33" + "In: 10; Out: 35; Cache create: 0; Cache read: 0; Total: 45" ] }, "execution_count": null, @@ -1022,7 +1022,6 @@ "metadata": {}, "outputs": [], "source": [ - "#| exports\n", "@patch\n", "@delegates(messages.Messages.create)\n", "def __call__(self:Client,\n", @@ -1039,7 +1038,7 @@ " if stream: return self._stream(msgs, prefill=prefill, max_tokens=maxtok, system=sp, temperature=temp, **kwargs)\n", " res = self.c.messages.create(\n", " model=self.model, messages=msgs, max_tokens=maxtok, system=sp, temperature=temp, **kwargs)\n", - " return self._log(res, prefill, msgs, maxtok, sp, temp, stream=stream, stop=stop, **kwargs)" + " return self._log(res, prefill, msgs, maxtok, sp, temp, stream=stream, **kwargs)" ] }, { @@ -1047,7 +1046,7 @@ "id": "3cee10c8", "metadata": {}, "source": [ - "Defining `__call__` let's us use an object like a function (i.e it's *callable*). We use it as a small wrapper over `messages.create`." + "Defining `__call__` let's us use an object like a function (i.e it's *callable*). We use it as a small wrapper over `messages.create`. However we're not exporting this version just yet -- we have some additions we'll make in a moment..." 
] }, { @@ -1095,7 +1094,7 @@ "\n", "
\n", "\n", - "- id: `msg_01FuxeUvfrUw5otuM2xVSAwC`\n", + "- id: `msg_01GvxbaiBfAz4koJSdSzHaUE`\n", "- content: `[{'text': 'Hello! How can I assist you today?', 'type': 'text'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", @@ -1107,7 +1106,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01FuxeUvfrUw5otuM2xVSAwC', content=[TextBlock(text='Hello! How can I assist you today?', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 8; Out: 12; Cache create: 0; Cache read: 0; Total: 20)" + "Message(id='msg_01GvxbaiBfAz4koJSdSzHaUE', content=[TextBlock(text='Hello! How can I assist you today?', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 8; Out: 12; Cache create: 0; Cache read: 0; Total: 20)" ] }, "execution_count": null, @@ -1172,7 +1171,7 @@ "\n", "
\n", "\n", - "- id: `msg_01FfSd87DpgaJadbFV1hVXr2`\n", + "- id: `msg_01XPR1ZmaftuR7PBv8cQ1HUr`\n", "- content: `[{'text': 'According to Douglas Adams, \"The answer to the ultimate question of life, the universe, and everything is 42.\"', 'type': 'text'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", @@ -1184,7 +1183,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01FfSd87DpgaJadbFV1hVXr2', content=[TextBlock(text='According to Douglas Adams, \"The answer to the ultimate question of life, the universe, and everything is 42.\"', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 24; Out: 23; Cache create: 0; Cache read: 0; Total: 47)" + "Message(id='msg_01XPR1ZmaftuR7PBv8cQ1HUr', content=[TextBlock(text='According to Douglas Adams, \"The answer to the ultimate question of life, the universe, and everything is 42.\"', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 24; Out: 23; Cache create: 0; Cache read: 0; Total: 47)" ] }, "execution_count": null, @@ -1303,7 +1302,7 @@ "\n", "
\n", "\n", - "- id: `msg_018QezzgZthSvEUCKSqqJAgz`\n", + "- id: `msg_01J7GmvKSLm872ewk78zy3wW`\n", "- content: `[{'text': '1, 2, 3, 4, ', 'type': 'text'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", @@ -1315,7 +1314,7 @@ "
" ], "text/plain": [ - "Message(id='msg_018QezzgZthSvEUCKSqqJAgz', content=[TextBlock(text='1, 2, 3, 4, ', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='stop_sequence', stop_sequence='5', type='message', usage=In: 15; Out: 14; Cache create: 0; Cache read: 0; Total: 29)" + "Message(id='msg_01J7GmvKSLm872ewk78zy3wW', content=[TextBlock(text='1, 2, 3, 4, ', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='stop_sequence', stop_sequence='5', type='message', usage=In: 15; Out: 14; Cache create: 0; Cache read: 0; Total: 29)" ] }, "execution_count": null, @@ -1382,7 +1381,7 @@ " 'temp': None,\n", " 'stream': None,\n", " 'stop': None,\n", - " 'result': Message(id='msg_01PNkqfoRMW697iJq3khecJG', content=[TextBlock(text='1, ', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='stop_sequence', stop_sequence='2', type='message', usage=In: 15; Out: 5; Cache create: 0; Cache read: 0; Total: 20),\n", + " 'result': Message(id='msg_01YH2byB6G58XHDq4SHjFk2m', content=[TextBlock(text='1, ', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='stop_sequence', stop_sequence='2', type='message', usage=In: 15; Out: 5; Cache create: 0; Cache read: 0; Total: 20),\n", " 'use': In: 94; Out: 89; Cache create: 0; Cache read: 0; Total: 183,\n", " 'stop_reason': 'stop_sequence',\n", " 'stop_sequence': '2'}" @@ -1522,12 +1521,12 @@ { "data": { "text/markdown": [ - "ToolUseBlock(id='toolu_015b6PXW26aRCNBMf3GQSoBW', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')\n", + "ToolUseBlock(id='toolu_01PrSX34RFZxCB8DKMxcKmYU', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')\n", "\n", "
\n", "\n", - "- id: `msg_015ycxFL6myW8ETF2AoCcCJF`\n", - "- content: `[{'id': 'toolu_015b6PXW26aRCNBMf3GQSoBW', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]`\n", + "- id: `msg_0178h4s4tP6F7CpeK7n3FvBt`\n", + "- content: `[{'id': 'toolu_01PrSX34RFZxCB8DKMxcKmYU', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", "- stop_reason: `tool_use`\n", @@ -1538,7 +1537,7 @@ "
" ], "text/plain": [ - "Message(id='msg_015ycxFL6myW8ETF2AoCcCJF', content=[ToolUseBlock(id='toolu_015b6PXW26aRCNBMf3GQSoBW', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-haiku-20240307', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 493; Out: 53; Cache create: 0; Cache read: 0; Total: 546)" + "Message(id='msg_0178h4s4tP6F7CpeK7n3FvBt', content=[ToolUseBlock(id='toolu_01PrSX34RFZxCB8DKMxcKmYU', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-haiku-20240307', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 493; Out: 53; Cache create: 0; Cache read: 0; Total: 546)" ] }, "execution_count": null, @@ -1647,7 +1646,7 @@ "data": { "text/plain": [ "{'type': 'tool_result',\n", - " 'tool_use_id': 'toolu_015b6PXW26aRCNBMf3GQSoBW',\n", + " 'tool_use_id': 'toolu_01PrSX34RFZxCB8DKMxcKmYU',\n", " 'content': 7063474}" ] }, @@ -1708,9 +1707,9 @@ "data": { "text/plain": [ "[{'role': 'assistant',\n", - " 'content': \"[ToolUseBlock(id='toolu_015b6PXW26aRCNBMf3GQSoBW', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')]\"},\n", + " 'content': \"[ToolUseBlock(id='toolu_01PrSX34RFZxCB8DKMxcKmYU', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')]\"},\n", " {'role': 'user',\n", - " 'content': \"[{'type': 'tool_result', 'tool_use_id': 'toolu_015b6PXW26aRCNBMf3GQSoBW', 'content': 7063474}]\"}]" + " 'content': \"[{'type': 'tool_result', 'tool_use_id': 'toolu_01PrSX34RFZxCB8DKMxcKmYU', 'content': 7063474}]\"}]" ] }, "execution_count": null, @@ -1806,14 +1805,166 @@ "source": [ "tools = [get_schema(Dummy.sums)]\n", "o = Dummy()\n", - "\n", - "msgs = mk_msgs(pr)\n", - "r = c(msgs, sp=sp, tools=tools, tool_choice=choice)\n", + "r = c(pr, sp=sp, tools=tools, tool_choice=choice)\n", "tr = mk_toolres(r, obj=o)\n", "msgs += tr\n", "contents(c(msgs, sp=sp, tools=tools))" ] }, + { + "cell_type": "code", + 
"execution_count": null, + "id": "73279877", + "metadata": {}, + "outputs": [], + "source": [ + "#| exports\n", + "@patch\n", + "@delegates(messages.Messages.create)\n", + "def __call__(self:Client,\n", + " msgs:list, # List of messages in the dialog\n", + " sp='', # The system prompt\n", + " temp=0, # Temperature\n", + " maxtok=4096, # Maximum tokens\n", + " prefill='', # Optional prefill to pass to Claude as start of its response\n", + " stream:bool=False, # Stream response?\n", + " stop=None, # Stop sequence\n", + " tools:Optional[list]=None, # List of tools to make available to Claude\n", + " tool_choice:Optional[dict]=None, # Optionally force use of some tool\n", + " **kwargs):\n", + " \"Make a call to Claude.\"\n", + " if tools: kwargs['tools'] = [get_schema(o) for o in listify(tools)]\n", + " if tool_choice and pr: kwargs['tool_choice'] = mk_tool_choice(tool_choice)\n", + " msgs = self._precall(msgs, prefill, stop, kwargs)\n", + " if stream: return self._stream(msgs, prefill=prefill, max_tokens=maxtok, system=sp, temperature=temp, stop=stop, **kwargs)\n", + " res = self.c.messages.create(model=self.model, messages=msgs, max_tokens=maxtok, system=sp, temperature=temp, **kwargs)\n", + " return self._log(res, prefill, msgs, maxtok, sp, temp, stream=stream, stop=stop, **kwargs)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6dd255e0", + "metadata": {}, + "outputs": [], + "source": [ + "r = c(pr, sp=sp, tools=sums, tool_choice=sums)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "92f1bb9a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "ToolUseBlock(id='toolu_01Kbyf7vCg5FC9hM1nDk3RpP', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')\n", + "\n", + "
\n", + "\n", + "- id: `msg_01S3tfcgcT9AvaosSDM8CLrC`\n", + "- content: `[{'id': 'toolu_01Kbyf7vCg5FC9hM1nDk3RpP', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]`\n", + "- model: `claude-3-haiku-20240307`\n", + "- role: `assistant`\n", + "- stop_reason: `tool_use`\n", + "- stop_sequence: `None`\n", + "- type: `message`\n", + "- usage: `{'input_tokens': 489, 'output_tokens': 57, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "\n", + "
" + ], + "text/plain": [ + "Message(id='msg_01S3tfcgcT9AvaosSDM8CLrC', content=[ToolUseBlock(id='toolu_01Kbyf7vCg5FC9hM1nDk3RpP', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-haiku-20240307', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 489; Out: 57; Cache create: 0; Cache read: 0; Total: 546)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "r" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f0bb426f", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Finding the sum of 604542 and 6458932\n" + ] + } + ], + "source": [ + "tr = mk_toolres(r)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b3564424", + "metadata": {}, + "outputs": [], + "source": [ + "#| exports\n", + "@patch\n", + "@delegates(Client.__call__)\n", + "def structured(self:Client,\n", + " msgs:list, # List of messages in the dialog\n", + " ns:Optional[abc.Mapping]=None, # Namespace to search for tools\n", + " obj:Optional=None, # Class to search for tools\n", + " **kwargs):\n", + " \"Return the value of all tool calls (generally used for structured outputs)\"\n", + " res = self(msgs, **kwargs)\n", + " if ns is None: ns=globals()\n", + " cts = getattr(r, 'content', [])\n", + " tcs = [call_func(o, ns=ns, obj=obj) for o in cts if isinstance(o,ToolUseBlock)]\n", + " return tcs" + ] + }, + { + "cell_type": "markdown", + "id": "e2b70864", + "metadata": {}, + "source": [ + "Anthropic's API does not support response formats directly, so instead we provide a `structured` method to use tool calling to achieve the same result. The result of the tool is not passed back to Claude in this case, but instead is returned directly to the user. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "44d2cc82", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Finding the sum of 604542 and 6458932\n" + ] + }, + { + "data": { + "text/plain": [ + "[7063474]" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "c.structured(pr)" + ] + }, { "cell_type": "markdown", "id": "4ea144b8", @@ -1952,9 +2103,8 @@ " prefill='', # Optional prefill to pass to Claude as start of its response\n", " **kw):\n", " self._append_pr(pr)\n", - " if self.tools: kw['tools'] = [get_schema(o) for o in self.tools]\n", - " if self.tool_choice and pr: kw['tool_choice'] = mk_tool_choice(self.tool_choice)\n", - " res = self.c(self.h, stream=stream, prefill=prefill, sp=self.sp, temp=temp, maxtok=maxtok, **kw)\n", + " res = self.c(self.h, stream=stream, prefill=prefill, sp=self.sp, temp=temp, maxtok=maxtok,\n", + " tools=self.tools, tool_choice=self.tool_choice, **kw)\n", " if stream: return self._stream(res)\n", " self.h += mk_toolres(self.c.result, ns=self.tools, obj=self)\n", " return res" diff --git a/claudette/_modidx.py b/claudette/_modidx.py index 81ded22..739d85c 100644 --- a/claudette/_modidx.py +++ b/claudette/_modidx.py @@ -28,6 +28,7 @@ 'claudette.core.Client._precall': ('core.html#client._precall', 'claudette/core.py'), 'claudette.core.Client._r': ('core.html#client._r', 'claudette/core.py'), 'claudette.core.Client._stream': ('core.html#client._stream', 'claudette/core.py'), + 'claudette.core.Client.structured': ('core.html#client.structured', 'claudette/core.py'), 'claudette.core.Message._repr_markdown_': ('core.html#message._repr_markdown_', 'claudette/core.py'), 'claudette.core.Usage.__add__': ('core.html#usage.__add__', 'claudette/core.py'), 'claudette.core.Usage.__repr__': ('core.html#usage.__repr__', 'claudette/core.py'), diff --git a/claudette/core.py b/claudette/core.py index 5dd4228..87a7d93 
100644 --- a/claudette/core.py +++ b/claudette/core.py @@ -134,25 +134,6 @@ def _precall(self:Client, msgs, prefill, stop, kwargs): msgs = mk_msgs(msgs+pref) return msgs -# %% ../00_core.ipynb -@patch -@delegates(messages.Messages.create) -def __call__(self:Client, - msgs:list, # List of messages in the dialog - sp='', # The system prompt - temp=0, # Temperature - maxtok=4096, # Maximum tokens - prefill='', # Optional prefill to pass to Claude as start of its response - stream:bool=False, # Stream response? - stop=None, # Stop sequence - **kwargs): - "Make a call to Claude." - msgs = self._precall(msgs, prefill, stop, kwargs) - if stream: return self._stream(msgs, prefill=prefill, max_tokens=maxtok, system=sp, temperature=temp, **kwargs) - res = self.c.messages.create( - model=self.model, messages=msgs, max_tokens=maxtok, system=sp, temperature=temp, **kwargs) - return self._log(res, prefill, msgs, maxtok, sp, temp, stream=stream, stop=stop, **kwargs) - # %% ../00_core.ipynb def mk_tool_choice(choose:Union[str,bool,None])->dict: "Create a `tool_choice` dict that's 'auto' if `choose` is `None`, 'any' if it is True, or 'tool' otherwise" @@ -193,6 +174,43 @@ def mk_toolres( if tcs: res.append(mk_msg(tcs)) return res +# %% ../00_core.ipynb +@patch +@delegates(messages.Messages.create) +def __call__(self:Client, + msgs:list, # List of messages in the dialog + sp='', # The system prompt + temp=0, # Temperature + maxtok=4096, # Maximum tokens + prefill='', # Optional prefill to pass to Claude as start of its response + stream:bool=False, # Stream response? + stop=None, # Stop sequence + tools:Optional[list]=None, # List of tools to make available to Claude + tool_choice:Optional[dict]=None, # Optionally force use of some tool + **kwargs): + "Make a call to Claude." 
+ if tools: kwargs['tools'] = [get_schema(o) for o in listify(tools)] + if tool_choice: kwargs['tool_choice'] = mk_tool_choice(tool_choice) + msgs = self._precall(msgs, prefill, stop, kwargs) + if stream: return self._stream(msgs, prefill=prefill, max_tokens=maxtok, system=sp, temperature=temp, stop=stop, **kwargs) + res = self.c.messages.create(model=self.model, messages=msgs, max_tokens=maxtok, system=sp, temperature=temp, **kwargs) + return self._log(res, prefill, msgs, maxtok, sp, temp, stream=stream, stop=stop, **kwargs) + +# %% ../00_core.ipynb +@patch +@delegates(Client.__call__) +def structured(self:Client, + msgs:list, # List of messages in the dialog + ns:Optional[abc.Mapping]=None, # Namespace to search for tools + obj:Optional=None, # Class to search for tools + **kwargs): + "Return the value of all tool calls (generally used for structured outputs)" + res = self(msgs, **kwargs) + if ns is None: ns=globals() + cts = getattr(res, 'content', []) + tcs = [call_func(o, ns=ns, obj=obj) for o in cts if isinstance(o,ToolUseBlock)] + return tcs