[azopenaiextensions] Updating to v0.1.0-alpha.37. Breaking change, only affects our tests. (Azure#23665)

Breaking change, only affects our tests.
richardpark-msft authored Nov 19, 2024
1 parent 6cfdc39 commit b1733e0
Showing 8 changed files with 230 additions and 165 deletions.
2 changes: 2 additions & 0 deletions sdk/ai/azopenaiextensions/CHANGELOG.md
@@ -10,6 +10,8 @@

### Other Changes

- Updating to `v0.1.0-alpha.37` of the [OpenAI go module](https://github.com/openai/openai-go).

## 0.1.0 (2024-10-14)

### Features Added
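For a consumer of azopenaiextensions tracking the same dependency, the version bump noted above under "Other Changes" maps to a single Go toolchain command (a sketch of the assumed workflow; run from the module that imports the package):

# Assumed consumer-side workflow for the version bump noted in the CHANGELOG:
go get github.com/openai/openai-go@v0.1.0-alpha.37
go mod tidy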
3 changes: 3 additions & 0 deletions sdk/ai/azopenaiextensions/ci.yml
@@ -1,4 +1,7 @@
# NOTE: Please refer to https://aka.ms/azsdk/engsys/ci-yaml before editing this file.

## ai-extensions

trigger:
  branches:
    include:
307 changes: 170 additions & 137 deletions sdk/ai/azopenaiextensions/client_assistants_test.go
@@ -21,178 +21,211 @@
This hunk rewrites TestAssistants, TestAssistantsThreads, and TestAssistantRun: each test body now lives in a testFn(t *testing.T, useAPIKey bool) helper that builds its client with newStainlessTestClientWithOptions (replacing newStainlessTestClient) and is run under "APIKey" and "TokenCredential" subtests. TestAssistants drops the commented-out NOTE about hitting the OpenAI service directly, and TestAssistantRun switches its model from the hard-coded "gpt-4-1106-preview" to azureOpenAI.Assistants.Model. The updated functions:

func TestAssistants(t *testing.T) {
	if recording.GetRecordMode() == recording.PlaybackMode {
		t.Skip("https://github.com/Azure/azure-sdk-for-go/issues/22869")
	}

	testFn := func(t *testing.T, useAPIKey bool) {
		assistantClient := newStainlessTestClientWithOptions(t, azureOpenAI.Assistants.Endpoint, &stainlessTestClientOptions{
			UseAPIKey: useAPIKey,
		}).Beta.Assistants

		assistant, err := assistantClient.New(context.Background(), openai.BetaAssistantNewParams{
			Model:        openai.F(azureOpenAI.Assistants.Model),
			Instructions: openai.String("Answer questions in any manner possible"),
		})
		require.NoError(t, err)

		t.Cleanup(func() {
			_, err := assistantClient.Delete(context.Background(), assistant.ID)
			require.NoError(t, err)
		})

		const desc = "This is a newly updated description"

		// update the assistant's description
		{
			updatedAssistant, err := assistantClient.Update(context.Background(), assistant.ID, openai.BetaAssistantUpdateParams{
				Description: openai.String(desc),
			})
			require.NoError(t, err)
			require.Equal(t, desc, updatedAssistant.Description)
			require.Equal(t, assistant.ID, updatedAssistant.ID)
		}

		// get the same assistant back again
		{
			assistant2, err := assistantClient.Get(context.Background(), assistant.ID)
			require.NoError(t, err)
			require.Equal(t, assistant.ID, assistant2.ID)
			require.Equal(t, desc, assistant2.Description)
		}

		// listing assistants
		{
			pager, err := assistantClient.List(context.Background(), openai.BetaAssistantListParams{
				After: openai.F(assistant.ID),
				Limit: openai.Int(1),
			})
			require.NoError(t, err)

			var pages []openai.Assistant = pager.Data
			require.NotEmpty(t, pages)

			page, err := pager.GetNextPage()
			require.NoError(t, err)

			if page != nil { // a nil page indicates we've read all pages.
				pages = append(pages, page.Data...)
			}

			require.NotEmpty(t, pages)
		}
	}

	t.Run("APIKey", func(t *testing.T) {
		testFn(t, true)
	})

	t.Run("TokenCredential", func(t *testing.T) {
		testFn(t, false)
	})
}
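newStainlessTestClientWithOptions and stainlessTestClientOptions are test-suite helpers that are not part of this diff. As a rough, hypothetical sketch of what the useAPIKey switch presumably selects between, the openai-go azure option helpers support both auth modes; apart from azure.WithEndpoint, azure.WithAPIKey, azure.WithTokenCredential, and azidentity, every name below is a placeholder:

// Hypothetical sketch only: the stainless test helpers in this repo are assumed to
// select between these two auth modes. The option helpers come from the openai-go
// azure package; the package name, function name, and parameters are placeholders.
package example

import (
	"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
	"github.com/openai/openai-go/azure"
	"github.com/openai/openai-go/option"
)

func azureClientOptions(endpoint, apiVersion, apiKey string, useAPIKey bool) ([]option.RequestOption, error) {
	// Point the openai-go client at an Azure OpenAI endpoint/API version.
	opts := []option.RequestOption{azure.WithEndpoint(endpoint, apiVersion)}

	if useAPIKey {
		// "APIKey" subtest path: authenticate with the resource's API key.
		opts = append(opts, azure.WithAPIKey(apiKey))
	} else {
		// "TokenCredential" subtest path: authenticate with Entra ID.
		cred, err := azidentity.NewDefaultAzureCredential(nil)
		if err != nil {
			return nil, err
		}
		opts = append(opts, azure.WithTokenCredential(cred))
	}

	// The returned options would then be passed to openai.NewClient(opts...).
	return opts, nil
}

In the subtests above, useAPIKey=true corresponds to the "APIKey" path and false to the "TokenCredential" path.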

func TestAssistantsThreads(t *testing.T) {
	if recording.GetRecordMode() == recording.PlaybackMode {
		t.Skip("https://github.com/Azure/azure-sdk-for-go/issues/22869")
	}

	testFn := func(t *testing.T, useAPIKey bool) {
		// NOTE: if you want to hit the OpenAI service instead...
		// assistantClient := openai.NewClient(
		// 	option.WithHeader("OpenAI-Beta", "assistants=v2"),
		// )
		beta := newStainlessTestClientWithOptions(t, azureOpenAI.Assistants.Endpoint, &stainlessTestClientOptions{
			UseAPIKey: useAPIKey,
		}).Beta
		assistantClient := beta.Assistants
		threadClient := beta.Threads

		assistant, err := assistantClient.New(context.Background(), openai.BetaAssistantNewParams{
			Model:        openai.F(azureOpenAI.Assistants.Model),
			Instructions: openai.String("Answer questions in any manner possible"),
		})
		require.NoError(t, err)

		t.Cleanup(func() {
			_, err := assistantClient.Delete(context.Background(), assistant.ID)
			require.NoError(t, err)
		})

		thread, err := threadClient.New(context.Background(), openai.BetaThreadNewParams{})
		require.NoError(t, err)

		t.Cleanup(func() {
			_, err := threadClient.Delete(context.Background(), thread.ID)
			require.NoError(t, err)
		})

		metadata := map[string]any{"hello": "world"}

		// update the thread
		{
			updatedThread, err := threadClient.Update(context.Background(), thread.ID, openai.BetaThreadUpdateParams{
				Metadata: openai.F[any](metadata),
			})
			require.NoError(t, err)
			require.Equal(t, thread.ID, updatedThread.ID)
			require.Equal(t, metadata, updatedThread.Metadata)
		}

		// get the thread back
		{
			gotThread, err := threadClient.Get(context.Background(), thread.ID)
			require.NoError(t, err)
			require.Equal(t, thread.ID, gotThread.ID)
			require.Equal(t, metadata, gotThread.Metadata)
		}
	}

	t.Run("APIKey", func(t *testing.T) {
		testFn(t, true)
	})

	t.Run("TokenCredential", func(t *testing.T) {
		testFn(t, false)
	})
}

func TestAssistantRun(t *testing.T) {
	if recording.GetRecordMode() == recording.PlaybackMode {
		t.Skip("https://github.com/Azure/azure-sdk-for-go/issues/22869")
	}

	testFn := func(t *testing.T, useAPIKey bool) {
		// NOTE: if you want to hit the OpenAI service instead...
		// assistantClient := openai.NewClient(
		// 	option.WithHeader("OpenAI-Beta", "assistants=v2"),
		// )

		client := newStainlessTestClientWithOptions(t, azureOpenAI.Assistants.Endpoint, &stainlessTestClientOptions{
			UseAPIKey: useAPIKey,
		})

		// (this is the test, verbatim, from openai-go: https://github.com/openai/openai-go/blob/main/examples/assistant-streaming/main.go)

		ctx := context.Background()

		// Create an assistant
		println("Create an assistant")
		assistant, err := client.Beta.Assistants.New(ctx, openai.BetaAssistantNewParams{
			Name:         openai.String("Math Tutor"),
			Instructions: openai.String("You are a personal math tutor. Write and run code to answer math questions."),
			Tools: openai.F([]openai.AssistantToolUnionParam{
				openai.CodeInterpreterToolParam{Type: openai.F(openai.CodeInterpreterToolTypeCodeInterpreter)},
			}),
			Model: openai.F[openai.ChatModel](azureOpenAI.Assistants.Model),
		})

		if err != nil {
			panic(err)
		}

		// Create a thread
		println("Create an thread")
		thread, err := client.Beta.Threads.New(ctx, openai.BetaThreadNewParams{})
		if err != nil {
			panic(err)
		}

		// Create a message in the thread
		println("Create a message")
		_, err = client.Beta.Threads.Messages.New(ctx, thread.ID, openai.BetaThreadMessageNewParams{
			Role: openai.F(openai.BetaThreadMessageNewParamsRoleAssistant),
			Content: openai.F([]openai.MessageContentPartParamUnion{
				openai.TextContentBlockParam{
					Type: openai.F(openai.TextContentBlockParamTypeText),
					Text: openai.String("I need to solve the equation `3x + 11 = 14`. Can you help me?"),
				},
			}),
		})
		if err != nil {
			panic(err)
		}

		// Create a run
		println("Create a run")
		stream := client.Beta.Threads.Runs.NewStreaming(ctx, thread.ID, openai.BetaThreadRunNewParams{
			AssistantID:  openai.String(assistant.ID),
			Instructions: openai.String("Please address the user as Jane Doe. The user has a premium account."),
		})

		for stream.Next() {
			evt := stream.Current()
			println(fmt.Sprintf("%T", evt.Data))
		}
	}

	t.Run("APIKey", func(t *testing.T) {
		testFn(t, true)
	})

	t.Run("TokenCredential", func(t *testing.T) {
		testFn(t, false)
	})
}
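One detail the verbatim upstream streaming example above leaves out is a terminal error check: openai-go's SSE stream reports failures through Err rather than through Next. A hypothetical sketch of how a caller might drain the run stream and surface that error — the ssestream package path and the AssistantStreamEvent element type are assumed from openai-go's generated streaming surface and are not confirmed by this diff:

// Hypothetical sketch: drain an assistant-run event stream, log each event's type
// (as the test above does), then report any terminal stream error.
// The ssestream import path and AssistantStreamEvent name are assumptions.
package example

import (
	"fmt"

	"github.com/openai/openai-go"
	"github.com/openai/openai-go/packages/ssestream"
)

func drainRunStream(stream *ssestream.Stream[openai.AssistantStreamEvent]) error {
	defer stream.Close() // release the underlying HTTP response

	for stream.Next() {
		evt := stream.Current()
		fmt.Printf("%T\n", evt.Data) // same per-event logging as the test
	}

	return stream.Err() // non-nil if the stream ended because of an error
}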
2 changes: 1 addition & 1 deletion sdk/ai/azopenaiextensions/client_functions_test.go
@@ -84,7 +84,7 @@ func TestGetChatCompletions_usingFunctions(t *testing.T) {
		// all of these variants use the tool provided - auto just also works since we did provide
		// a tool reference and ask a question to use it.
		{Model: azureOpenAI.ChatCompletions.Model, ToolChoice: nil},
-		{Model: azureOpenAI.ChatCompletions.Model, ToolChoice: openai.ChatCompletionToolChoiceOptionStringAuto},
+		{Model: azureOpenAI.ChatCompletions.Model, ToolChoice: openai.ChatCompletionToolChoiceOptionBehaviorAuto},
		{Model: azureOpenAI.ChatCompletions.Model, ToolChoice: openai.ChatCompletionNamedToolChoiceParam{
			Type:     openai.F(openai.ChatCompletionNamedToolChoiceTypeFunction),
			Function: openai.F(openai.ChatCompletionNamedToolChoiceFunctionParam{
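This one-line rename is the tests-only breaking change called out in the commit message: the constant for "auto" tool choice moved from ChatCompletionToolChoiceOptionStringAuto to ChatCompletionToolChoiceOptionBehaviorAuto. A hypothetical sketch of the new spelling in an ordinary chat-completions request — the ChatCompletionNewParams fields, union name, and UserMessage helper are assumed from openai-go's generated API for this alpha line and may differ slightly by version:

// Hypothetical sketch: requesting "auto" tool choice with the renamed constant.
// Field and union names are assumed from openai-go's generated surface.
package example

import "github.com/openai/openai-go"

func chatParamsWithAutoToolChoice(model openai.ChatModel) openai.ChatCompletionNewParams {
	return openai.ChatCompletionNewParams{
		Model: openai.F(model),
		Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
			openai.UserMessage("What's the weather like in Seattle?"),
		}),
		// Previously spelled openai.ChatCompletionToolChoiceOptionStringAuto.
		ToolChoice: openai.F[openai.ChatCompletionToolChoiceOptionUnionParam](openai.ChatCompletionToolChoiceOptionBehaviorAuto),
	}
}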
