Skip to content

Commit

Permalink
wip: fixing anthropic
Browse files Browse the repository at this point in the history
  • Loading branch information
dieriba committed Nov 8, 2024
1 parent 6f30a22 commit 8bd97d2
Show file tree
Hide file tree
Showing 6 changed files with 12,100 additions and 11,741 deletions.
9 changes: 2 additions & 7 deletions backend/windmill-api/src/ai.rs
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,6 @@ mod openai {
}

pub fn prepare_request(self, openai_path: &str, mut body: Bytes) -> Result<RequestBuilder> {
println!("In openai");
let OpenaiCache { api_key, azure_base_path, organization_id, user } = self;
if user.is_some() {
tracing::debug!("Adding user to request body");
Expand All @@ -84,8 +83,6 @@ mod openai {
.into();
}

let cp = body.clone();
println!("{:#?}", body);
let base_url = if let Some(base_url) = azure_base_path {
base_url
} else {
Expand Down Expand Up @@ -248,16 +245,14 @@ mod anthropic {

/// Builds an authenticated POST request for the Anthropic Messages API.
///
/// Consumes the cached credentials (`self`) and the raw JSON `body`,
/// targeting `ANTHROPIC_BASE_API_URL/<anthropic_path>` with the required
/// `x-api-key`, `anthropic-version`, and `content-type` headers.
/// The request is returned unsent; the caller is responsible for
/// dispatching it and handling the response.
pub fn prepare_request(self, anthropic_path: &str, body: Bytes) -> Result<RequestBuilder> {
    let AnthropicCache { api_key } = self;
    let url = format!("{}/{}", ANTHROPIC_BASE_API_URL, anthropic_path);
    // NOTE(review): removed leftover debug `println!`s — one printed the
    // api_key to stdout (credential leak in logs) — and a dead
    // `body.clone()` that copied the full request body for no use.
    let request = HTTP_CLIENT
        .post(url)
        .header("x-api-key", api_key)
        .header("anthropic-version", API_VERSION)
        .header("content-type", "application/json")
        .body(body);
    Ok(request)
}
}
Expand Down Expand Up @@ -429,14 +424,14 @@ async fn proxy(
ai_cache
}
};
println!("{:#?}", ai_cache);
let (path, request) = match ai_cache {
KeyCache::Openai(cached) => ("openai_path", cached.prepare_request(&ai_path, body)),
KeyCache::Anthropic(cached) => ("anthropic_path", cached.prepare_request(&ai_path, body)),
};

let response = request?.send().await.map_err(to_anyhow)?;


let mut tx = db.begin().await?;

audit_log(
Expand Down
1 change: 1 addition & 0 deletions frontend/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ node_modules
/package
CaddyfileRemoteRuben
tests-out/
/src/lib/gen
storageState.json
.env.production
dist/
Expand Down
1 change: 1 addition & 0 deletions frontend/src/lib/components/copilot/MetadataGen.svelte
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,7 @@ Generate a description for the flow below:
const response = await getCompletion(messages, abortController, aiProvider)
generatedContent = ''
for await (const chunk of response) {
console.log({ chunk })
if (isChatCompletionChunk(chunk)) {
const toks = chunk.choices[0]?.delta?.content || ''
generatedContent += toks
Expand Down
9 changes: 5 additions & 4 deletions frontend/src/lib/components/copilot/lib.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ const openaiConfig: ChatCompletionCreateParamsStreaming = {

const anthropicConfig: MessageCreateParams = {
temperature: 0,
max_tokens: 16384,
max_tokens: 8192,
model: 'claude-3-5-sonnet-20241022',
stream: true,
messages: []
Expand Down Expand Up @@ -341,16 +341,15 @@ export async function getNonStreamingCompletion(
system = messages[0].content as string
messages.shift()
}
const completion = await anthropicClient.messages.create(
const message = await anthropicClient.messages.create(
{
...anthropicConfig,
system,
messages: messages as MessageParam[]
},
queryOptions
)

console.log(messages)
console.log({ message })
return ''
}
}
Expand Down Expand Up @@ -439,6 +438,8 @@ export async function copilot(
for await (const part of completion) {
if (isChatCompletionChunk(part)) {
response += part.choices[0]?.delta?.content || ''
console.log({ part })

}
let match = response.match(/```[a-zA-Z]+\n([\s\S]*?)\n```/)

Expand Down
Loading

0 comments on commit 8bd97d2

Please sign in to comment.