Commit: Formatting
peterbanda committed Jan 28, 2025
1 parent 5420d02 commit dcb109c
Showing 16 changed files with 121 additions and 52 deletions.
@@ -42,7 +42,7 @@ object Content {
startCharIndex: Option[Int],
endCharIndex: Option[Int],
startBlockIndex: Option[Int],
endBlockIndex: Option[Int],
endBlockIndex: Option[Int]
)

case class MediaBlock(
@@ -22,4 +22,4 @@ case class CitationsFlagRaw(
case class TextContentRaw(
`type`: String,
text: String
)
)
@@ -22,7 +22,9 @@ final case class CreateMessageResponse(
textsWithCitations.map(_._2)

def textsWithCitations: Seq[(String, Seq[Citation])] =
content.blocks.collect { case ContentBlockBase(TextBlock(text, citations), _) => (text, citations) }
content.blocks.collect { case ContentBlockBase(TextBlock(text, citations), _) =>
(text, citations)
}

def text: String = texts.mkString("")
}
@@ -2,7 +2,10 @@ package io.cequence.openaiscala.anthropic.service.impl

import akka.NotUsed
import akka.stream.scaladsl.Source
import io.cequence.openaiscala.anthropic.domain.response.{ContentBlockDelta, CreateMessageResponse}
import io.cequence.openaiscala.anthropic.domain.response.{
ContentBlockDelta,
CreateMessageResponse
}
import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings
import io.cequence.openaiscala.anthropic.domain.Message
import io.cequence.wsclient.ResponseImplicits.JsonSafeOps
@@ -30,7 +30,8 @@ object OpenAIChatCompletionStreamedOutputConversionAdapter {
.createChatCompletionStreamed(
messages,
settings
).via(conversionStream(messageConversion))
)
.via(conversionStream(messageConversion))

private def conversionStream(
messageProcessingFlow: Flow[Seq[ChunkMessageSpec], Seq[ChunkMessageSpec], NotUsed]
@@ -57,10 +58,10 @@ object OpenAIChatCompletionStreamedOutputConversionAdapter {
val mergeBack = Flow[(ChatCompletionChunkResponse, Seq[ChunkMessageSpec])].map {
case (response, updatedChoices) =>
response.copy(
choices = response.choices.zip(updatedChoices).map {
case (choice, updatedChoice) =>
choices =
response.choices.zip(updatedChoices).map { case (choice, updatedChoice) =>
choice.copy(delta = updatedChoice)
}
}
)
}

@@ -128,7 +128,8 @@ trait ChatCompletionBodyMaker {
Param.store -> settingsFinal.store,
Param.reasoning_effort -> settingsFinal.reasoning_effort.map(_.toString()),
Param.service_tier -> settingsFinal.service_tier.map(_.toString()),
Param.metadata -> (if (settingsFinal.metadata.nonEmpty) Some(settingsFinal.metadata) else None),
Param.metadata -> (if (settingsFinal.metadata.nonEmpty) Some(settingsFinal.metadata)
else None),
Param.extra_params -> {
if (settingsFinal.extra_params.nonEmpty) Some(settingsFinal.extra_params) else None
}
@@ -5,11 +5,22 @@ import io.cequence.openaiscala.domain.Batch._
import io.cequence.openaiscala.domain.ChunkingStrategy.StaticChunkingStrategy
import io.cequence.openaiscala.domain.FineTune.WeightsAndBiases
import io.cequence.openaiscala.domain.ThreadAndRun.Content.ContentBlock.ImageDetail
import io.cequence.openaiscala.domain.settings.{ChatCompletionResponseFormatType, ReasoningEffort, ServiceTier}
import io.cequence.openaiscala.domain.settings.{
ChatCompletionResponseFormatType,
ReasoningEffort,
ServiceTier
}
import io.cequence.openaiscala.domain.Run.TruncationStrategy
import io.cequence.openaiscala.domain.StepDetail.{MessageCreation, ToolCalls}
import io.cequence.openaiscala.domain.response.AssistantToolResourceResponse.{CodeInterpreterResourcesResponse, FileSearchResourcesResponse}
import io.cequence.openaiscala.domain.response.ResponseFormat.{JsonObjectResponse, StringResponse, TextResponse}
import io.cequence.openaiscala.domain.response.AssistantToolResourceResponse.{
CodeInterpreterResourcesResponse,
FileSearchResourcesResponse
}
import io.cequence.openaiscala.domain.response.ResponseFormat.{
JsonObjectResponse,
StringResponse,
TextResponse
}
import io.cequence.openaiscala.domain.response._
import io.cequence.openaiscala.domain.settings.JsonSchemaDef
import io.cequence.openaiscala.domain.{ThreadMessageFile, _}
@@ -40,7 +51,8 @@ object JsonFormats {
Format(reads, writes)
}

implicit lazy val completionTokenDetailsFormat: Format[CompletionTokenDetails] = Json.format[CompletionTokenDetails]
implicit lazy val completionTokenDetailsFormat: Format[CompletionTokenDetails] =
Json.format[CompletionTokenDetails]

implicit lazy val usageInfoFormat: Format[UsageInfo] = Json.format[UsageInfo]

@@ -307,7 +319,8 @@ object JsonFormats {
}
}

implicit val chatCompletionResponseFormatTypeFormat: Format[ChatCompletionResponseFormatType] = enumFormat[ChatCompletionResponseFormatType](
implicit val chatCompletionResponseFormatTypeFormat
: Format[ChatCompletionResponseFormatType] = enumFormat[ChatCompletionResponseFormatType](
ChatCompletionResponseFormatType.json_object,
ChatCompletionResponseFormatType.json_schema,
ChatCompletionResponseFormatType.text
@@ -1279,4 +1292,4 @@ object JsonFormats {
Format(eitherJsonSchemaReads, eitherJsonSchemaWrites)

implicit val jsonSchemaDefFormat: Format[JsonSchemaDef] = Json.format[JsonSchemaDef]
}
}
@@ -1,7 +1,13 @@
package io.cequence.openaiscala.domain.response

import io.cequence.openaiscala.OpenAIScalaClientException
import io.cequence.openaiscala.domain.{AssistantFunMessage, AssistantMessage, AssistantToolMessage, BaseMessage, ChatRole}
import io.cequence.openaiscala.domain.{
AssistantFunMessage,
AssistantMessage,
AssistantToolMessage,
BaseMessage,
ChatRole
}

import java.{util => ju}

@@ -30,9 +36,13 @@ case class ChatCompletionResponse(
ChatCompletionChoiceInfo
] {

def contentHead: String = choices.headOption.map(_.message.content).getOrElse(
throw new OpenAIScalaClientException(s"No content in the chat completion response ${id}.")
)
def contentHead: String = choices.headOption
.map(_.message.content)
.getOrElse(
throw new OpenAIScalaClientException(
s"No content in the chat completion response ${id}."
)
)
}

case class ChatToolCompletionResponse(
@@ -88,7 +88,7 @@ object ChatCompletionSettingsConversions {
"O1 models don't support parallel tool calls, converting to None."
),
warning = true
),
)
)

private val o1PreviewConversions =
@@ -3,7 +3,12 @@ package io.cequence.openaiscala.service.adapter
import akka.NotUsed
import akka.stream.scaladsl.Flow
import io.cequence.openaiscala.domain.response.ChunkMessageSpec
import io.cequence.openaiscala.domain.{AssistantMessage, BaseMessage, SystemMessage, UserMessage}
import io.cequence.openaiscala.domain.{
AssistantMessage,
BaseMessage,
SystemMessage,
UserMessage
}
import org.slf4j.LoggerFactory

object MessageConversions {
@@ -54,7 +59,8 @@ object MessageConversions {
if (foundEnd) {
List(messages)
} else {
val endFoundInThisChunk = messages.exists(_.content.exists(_.trim.matches(thinkEndTagRegex)))
val endFoundInThisChunk =
messages.exists(_.content.exists(_.trim.matches(thinkEndTagRegex)))

if (endFoundInThisChunk) {
foundEnd = true
@@ -1,6 +1,6 @@
package io.cequence.openaiscala.examples

import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.domain.settings.{CreateChatCompletionSettings, ServiceTier}
import io.cequence.openaiscala.domain._

import scala.concurrent.Future
@@ -17,12 +17,15 @@ object CreateChatCompletion extends Example {
.createChatCompletion(
messages = messages,
settings = CreateChatCompletionSettings(
model = ModelId.o1_mini,
model = ModelId.gpt_4o,
temperature = Some(0),
max_tokens = Some(4000)
max_tokens = Some(4000),
service_tier = Some(ServiceTier.auto),
metadata = Map()
)
)
.map { content =>
printMessageContent(content)
.map { response =>
println(response.usage.get)
printMessageContent(response)
}
}
@@ -21,7 +21,5 @@ object CreateChatCompletionLongResponse extends Example {
temperature = Some(0)
)
)
.map { content =>
printMessageContent(content)
}
.map(printMessageContent)
}
@@ -1,7 +1,11 @@
package io.cequence.openaiscala.examples

import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.{ChatCompletionResponseFormatType, CreateChatCompletionSettings, ReasoningEffort}
import io.cequence.openaiscala.domain.settings.{
ChatCompletionResponseFormatType,
CreateChatCompletionSettings,
ReasoningEffort
}

import scala.concurrent.Future

@@ -3,6 +3,8 @@ package io.cequence.openaiscala.examples
import io.cequence.openaiscala.domain.AssistantTool.FunctionTool
import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import play.api.libs.json.{JsObject, Json}
import io.cequence.openaiscala.JsonFormats._

import scala.concurrent.Future

@@ -17,20 +19,20 @@ object CreateChatToolCompletion extends Example {
FunctionTool(
name = "get_current_weather",
description = Some("Get the current weather in a given location"),
parameters = Map(
"type" -> "object",
"properties" -> Map(
"location" -> Map(
"type" -> "string",
"description" -> "The city and state, e.g. San Francisco, CA"
parameters = Json.toJson(
JsonSchema.Object(
properties = Seq(
"location" -> JsonSchema.String(
description = Some("The city and state, e.g. San Francisco, CA")
),
"unit" -> JsonSchema.String(
description = Some("The unit of temperature"),
`enum` = Seq("celsius", "fahrenheit")
)
),
"unit" -> Map(
"type" -> "string",
"enum" -> Seq("celsius", "fahrenheit")
)
),
"required" -> Seq("location")
)
required = Seq("location")
): JsonSchema
).as[JsObject].value.toMap
)
)

@@ -40,7 +42,10 @@
messages = messages,
tools = tools,
responseToolChoice = None, // means "auto"
settings = CreateChatCompletionSettings(ModelId.gpt_3_5_turbo_1106)
settings = CreateChatCompletionSettings(
ModelId.gpt_3_5_turbo_1106,
parallel_tool_calls = Some(true)
),
)
.map { response =>
val chatFunCompletionMessage = response.choices.head.message
@@ -3,15 +3,35 @@ package io.cequence.openaiscala.perplexity.service.impl
import akka.NotUsed
import akka.stream.scaladsl.Source
import io.cequence.openaiscala.OpenAIScalaClientException
import io.cequence.openaiscala.domain.{AssistantMessage, BaseMessage, SystemMessage, UserMessage}
import io.cequence.openaiscala.domain.response.{ChatCompletionChunkResponse, ChatCompletionResponse}
import io.cequence.openaiscala.domain.settings.{ChatCompletionResponseFormatType, CreateChatCompletionSettings}
import io.cequence.openaiscala.domain.{
AssistantMessage,
BaseMessage,
SystemMessage,
UserMessage
}
import io.cequence.openaiscala.domain.response.{
ChatCompletionChunkResponse,
ChatCompletionResponse
}
import io.cequence.openaiscala.domain.settings.{
ChatCompletionResponseFormatType,
CreateChatCompletionSettings
}
import io.cequence.openaiscala.JsonFormats.eitherJsonSchemaFormat
import io.cequence.openaiscala.perplexity.domain.Message
import io.cequence.openaiscala.perplexity.domain.response.{SonarChatCompletionChunkResponse, SonarChatCompletionResponse}
import io.cequence.openaiscala.perplexity.domain.settings.{SolarResponseFormat, SonarCreateChatCompletionSettings}
import io.cequence.openaiscala.perplexity.domain.response.{
SonarChatCompletionChunkResponse,
SonarChatCompletionResponse
}
import io.cequence.openaiscala.perplexity.domain.settings.{
SolarResponseFormat,
SonarCreateChatCompletionSettings
}
import io.cequence.openaiscala.perplexity.service.{SonarConsts, SonarService}
import io.cequence.openaiscala.service.{OpenAIChatCompletionService, OpenAIChatCompletionStreamedServiceExtra}
import io.cequence.openaiscala.service.{
OpenAIChatCompletionService,
OpenAIChatCompletionStreamedServiceExtra
}
import io.cequence.wsclient.JsonUtil
import play.api.libs.json.{JsObject, Json}

@@ -7,7 +7,10 @@ import io.cequence.openaiscala.OpenAIScalaClientException
import io.cequence.openaiscala.perplexity.domain.Message
import io.cequence.openaiscala.perplexity.domain.settings.SonarCreateChatCompletionSettings
import io.cequence.openaiscala.perplexity.JsonFormats._
import io.cequence.openaiscala.perplexity.domain.response.{SonarChatCompletionChunkResponse, SonarChatCompletionResponse}
import io.cequence.openaiscala.perplexity.domain.response.{
SonarChatCompletionChunkResponse,
SonarChatCompletionResponse
}
import io.cequence.openaiscala.perplexity.service.SonarService
import io.cequence.wsclient.JsonUtil.JsonOps
import io.cequence.wsclient.ResponseImplicits.JsonSafeOps