Skip to content

Commit

Permalink
fix backend missing var error and fully support stream event for anth…
Browse files Browse the repository at this point in the history
…ropic
  • Loading branch information
dieriba committed Nov 8, 2024
1 parent 2e0cf44 commit e0ebb4e
Show file tree
Hide file tree
Showing 3 changed files with 40 additions and 24 deletions.
1 change: 1 addition & 0 deletions backend/windmill-api/src/ai.rs
Original file line number Diff line number Diff line change
Expand Up @@ -246,6 +246,7 @@ mod anthropic {

pub fn prepare_request(self, anthropic_path: &str, body: Bytes) -> Result<RequestBuilder> {
let AnthropicCache { api_key } = self;
let url = format!("{}/{}", ANTHROPIC_BASE_API_URL, anthropic_path);
let request = HTTP_CLIENT
.post(url)
.header("x-api-key", api_key)
Expand Down
8 changes: 2 additions & 6 deletions frontend/src/lib/components/copilot/MetadataGen.svelte
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
<script lang="ts">
import { getCompletion, isChatCompletionChunk, type AiProviderTypes } from './lib'
import { getCompletion, getResponseFromEvent, type AiProviderTypes } from './lib'
import { isInitialCode } from '$lib/script_helpers'
import { Check, Loader2, Wand2 } from 'lucide-svelte'
import { copilotInfo, metadataCompletionEnabled } from '$lib/stores'
Expand Down Expand Up @@ -122,11 +122,7 @@ Generate a description for the flow below:
const response = await getCompletion(messages, abortController, aiProvider)
generatedContent = ''
for await (const chunk of response) {
console.log({ chunk })
if (isChatCompletionChunk(chunk)) {
const toks = chunk.choices[0]?.delta?.content || ''
generatedContent += toks
}
generatedContent += getResponseFromEvent(chunk)
}
} catch (err) {
if (!abortController.signal.aborted) {
Expand Down
55 changes: 37 additions & 18 deletions frontend/src/lib/components/copilot/lib.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@ import type {
} from 'openai/resources/index.mjs'

import type { MessageCreateParams, MessageParam } from '@anthropic-ai/sdk/resources/messages.mjs'
import type { Stream } from '@anthropic-ai/sdk/streaming.mjs'

export const SUPPORTED_LANGUAGES = new Set(Object.keys(GEN_CONFIG.prompts))

Expand All @@ -32,7 +31,6 @@ const anthropicConfig: MessageCreateParams = {
temperature: 0,
max_tokens: 8192,
model: 'claude-3-5-sonnet-20241022',
stream: true,
messages: []
}

Expand Down Expand Up @@ -366,15 +364,24 @@ export async function getCompletion(
system = messages[0].content as string
messages.shift()
}

const anthropicMessages: MessageParam[] = messages.map((message) => {
return {
role: message.role == 'user' ? 'user' : 'assistant',
content: message.content as string
}
})

const completion = await anthropicClient.messages.create(
{
...anthropicConfig,
system,
messages: messages as MessageParam[]
messages: anthropicMessages,
stream: true
},
{ signal: abortController.signal }
)
return completion as Stream<Anthropic.Messages.RawMessageStreamEvent>
return completion
}

case 'openai': {
Expand All @@ -394,18 +401,40 @@ export async function getCompletion(
}
}

/*
Kept for when support for a new AI provider is added (only Anthropic and OpenAI are supported for now):
it helps determine which AI provider a streamed response comes from.
If needed, use it in the getResponseFromEvent function just below.
function isRawMessageStreamEvent(
message: Anthropic.Messages.RawMessageStreamEvent | OpenAI.Chat.Completions.ChatCompletionChunk
): message is Anthropic.Messages.RawMessageStreamEvent {
return 'type' in message
}

export function isChatCompletionChunk(
*/
/**
 * Type guard separating OpenAI chat-completion chunks from Anthropic stream
 * events: only OpenAI chunks carry a `choices` array.
 */
function isChatCompletionChunk(
	response: Anthropic.Messages.RawMessageStreamEvent | OpenAI.Chat.Completions.ChatCompletionChunk
): response is OpenAI.Chat.Completions.ChatCompletionChunk {
	return 'choices' in response
}

/**
 * Extracts the text delta carried by a single streamed event, regardless of
 * which provider (OpenAI or Anthropic) produced it.
 * Events that carry no text payload yield the empty string.
 */
export function getResponseFromEvent(
	part: Anthropic.Messages.RawMessageStreamEvent | OpenAI.Chat.Completions.ChatCompletionChunk
): string {
	if (isChatCompletionChunk(part)) {
		return part.choices[0]?.delta?.content || ''
	}
	// Anthropic stream: only content_block_delta events contain payload text.
	if (part.type != 'content_block_delta') {
		return ''
	}
	// text_delta carries plain text; otherwise it is an input_json_delta
	// (partial JSON for tool use) — forward whichever the event holds.
	return part.delta.type == 'text_delta' ? part.delta.text : part.delta.partial_json
}

export async function copilot(
scriptOptions: CopilotOptions,
generatedCode: Writable<string>,
Expand Down Expand Up @@ -433,15 +462,7 @@ export async function copilot(
let response = ''
let code = ''
for await (const part of completion) {
if (isChatCompletionChunk(part)) {
response += part.choices[0]?.delta?.content || ''
} else if (part.type == 'content_block_delta') {
if (part.delta.type == 'text_delta') {
response += part.delta.text
} else {
response += part.delta.partial_json
}
}
response += getResponseFromEvent(part)
let match = response.match(/```[a-zA-Z]+\n([\s\S]*?)\n```/)

if (match) {
Expand Down Expand Up @@ -516,9 +537,7 @@ export async function deltaCodeCompletion(
let code = ''
let delta = ''
for await (const part of completion) {
if (isChatCompletionChunk(part)) {
response += part.choices[0]?.delta?.content || ''
}
response += getResponseFromEvent(part)
let match = response.match(/```[a-zA-Z]+\n([\s\S]*?)\n```/)

if (match) {
Expand Down

0 comments on commit e0ebb4e

Please sign in to comment.