Chat Completion - createChatFunCompletion deprecated and new createChatToolCompletion added. The usage, however, stayed pretty much the same.
peterbanda committed Nov 13, 2023
1 parent f717291 commit d1357fc
Showing 3 changed files with 69 additions and 4 deletions.
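
Since the commit message says the usage stays pretty much the same, here is a minimal before/after sketch of the call sites. It is illustrative only: the ChatRole value, the FunMessageSpec constructor shape, and the description field on FunctionSpec are assumptions, not part of this diff.

import scala.concurrent.Future
import io.cequence.openaiscala.domain.{ChatRole, FunMessageSpec, FunctionSpec}
import io.cequence.openaiscala.domain.response.ChatFunCompletionResponse
import io.cequence.openaiscala.service.OpenAIService

object MigrationSketch {

  // Constructor shapes below (ChatRole.User, FunMessageSpec fields, description) are assumed.
  private val messages = Seq(
    FunMessageSpec(role = ChatRole.User, content = Some("What is the weather in Prague?"))
  )

  private val weatherFunction = FunctionSpec(
    name = "get_current_weather",
    description = Some("Returns the current weather for a given city"),
    parameters = Map(
      "type" -> "object",
      "properties" -> Map("city" -> Map("type" -> "string")),
      "required" -> Seq("city")
    )
  )

  // Old entry point, deprecated by this commit.
  def oldStyle(service: OpenAIService): Future[ChatFunCompletionResponse] =
    service.createChatFunCompletion(
      messages = messages,
      functions = Seq(weatherFunction)
    )

  // New entry point; a FunctionSpec can be passed directly because it now extends ToolSpec.
  def newStyle(service: OpenAIService): Future[ChatFunCompletionResponse] =
    service.createChatToolCompletion(
      messages = messages,
      tools = Seq(weatherFunction)
    )
}
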
@@ -1,12 +1,12 @@
package io.cequence.openaiscala.service

-import play.api.libs.json.{JsObject, JsValue, Json}
+import play.api.libs.json.{JsArray, JsObject, JsValue, Json}
import io.cequence.openaiscala.JsonUtil.JsonOps
import io.cequence.openaiscala.JsonFormats._
import io.cequence.openaiscala.OpenAIScalaClientException
import io.cequence.openaiscala.domain.settings._
import io.cequence.openaiscala.domain.response._
-import io.cequence.openaiscala.domain.{FunMessageSpec, FunctionSpec}
+import io.cequence.openaiscala.domain.{FunMessageSpec, FunctionSpec, ToolSpec}

import java.io.File
import scala.concurrent.Future
@@ -53,6 +53,43 @@ private trait OpenAIServiceImpl extends OpenAICoreServiceImpl with OpenAIService
)
}

override def createChatToolCompletion(
messages: Seq[FunMessageSpec],
tools: Seq[ToolSpec],
responseToolChoice: Option[String] = None,
settings: CreateChatCompletionSettings = DefaultSettings.CreateChatFunCompletion
): Future[ChatFunCompletionResponse] = {
val coreParams =
createBodyParamsForChatCompletion(messages, settings, stream = false)

val extraParams = jsonBodyParams(
Param.functions -> Some(JsArray(tools.map(toolToJson))),
Param.function_call -> responseToolChoice.map(name =>
Map(
"type" -> "function",
"function" -> Map("name" -> name)
)
) // otherwise "auto" is used by default
)

execPOST(
EndPoint.chat_completions,
bodyParams = coreParams ++ extraParams
).map(
_.asSafe[ChatFunCompletionResponse]
)
}

// handle new tool types here
private def toolToJson(tool: ToolSpec) =
tool match {
case x: FunctionSpec =>
Json.obj(
"type" -> "function",
"function" -> Json.toJson(x)
)
}

override def createEdit(
input: String,
instruction: String,
@@ -1,5 +1,7 @@
package io.cequence.openaiscala.domain

sealed trait ToolSpec

case class FunctionSpec(
// The name of the function to be called.
// Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.
@@ -11,4 +13,4 @@ case class FunctionSpec(
// The parameters the functions accepts, described as a JSON Schema object.
// See the guide for examples, and the JSON Schema reference for documentation about the format.
parameters: Map[String, Any]
-)
+) extends ToolSpec
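
A side note on the sealed trait, sketched below: because ToolSpec is sealed, pattern matches over it (such as toolToJson in the service implementation above) are exhaustiveness-checked by the compiler when new tool subtypes are added later. The describe helper is purely illustrative.

import io.cequence.openaiscala.domain.{FunctionSpec, ToolSpec}

object ToolSpecSketch {
  // Purely illustrative: the compiler can warn here if ToolSpec gains a new
  // subtype that is not handled, which is the benefit of sealing the trait.
  def describe(tool: ToolSpec): String =
    tool match {
      case f: FunctionSpec => s"function tool: ${f.name}"
    }
}
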
@@ -1,6 +1,6 @@
package io.cequence.openaiscala.service

-import io.cequence.openaiscala.domain.{FunMessageSpec, FunctionSpec}
+import io.cequence.openaiscala.domain.{FunMessageSpec, FunctionSpec, ToolSpec}
import io.cequence.openaiscala.domain.settings._
import io.cequence.openaiscala.domain.response._

@@ -61,13 +61,39 @@ trait OpenAIService extends OpenAICoreService
* @see
* <a href="https://platform.openai.com/docs/api-reference/chat/create">OpenAI Doc</a>
*/
@Deprecated("Use createChatToolCompletion instead")
def createChatFunCompletion(
messages: Seq[FunMessageSpec],
functions: Seq[FunctionSpec],
responseFunctionName: Option[String] = None,
settings: CreateChatCompletionSettings = DefaultSettings.CreateChatFunCompletion
): Future[ChatFunCompletionResponse]

/**
* Creates a model response for the given chat conversation expecting a tool call.
*
* @param messages
* A list of messages comprising the conversation so far.
* @param tools
* A list of tools the model may call. Currently, only functions are supported as a tool.
* Use this to provide a list of functions the model may generate JSON inputs for.
* @param responseToolChoice
* Controls which (if any) function/tool is called by the model.
* Specifying a particular function forces the model to call that function (must be listed in `tools`).
* Otherwise, the default "auto" mode is used where the model can pick between generating a message or calling a function.
* @param settings
* @return
* chat completion response
* @see
* <a href="https://platform.openai.com/docs/api-reference/chat/create">OpenAI Doc</a>
*/
def createChatToolCompletion(
messages: Seq[FunMessageSpec],
tools: Seq[ToolSpec],
responseToolChoice: Option[String] = None,
settings: CreateChatCompletionSettings = DefaultSettings.CreateChatFunCompletion
): Future[ChatFunCompletionResponse]

/**
* Creates a new edit for the provided input, instruction, and parameters.
*
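
Finally, a sketch of the responseToolChoice behaviour described in the scaladoc above: passing a function name forces the model to call that tool, while omitting the argument leaves it in "auto" mode. As in the earlier sketch, the message and function constructor shapes are assumptions, and the response is only logged because its fields are not part of this diff.

import scala.concurrent.ExecutionContext.Implicits.global
import io.cequence.openaiscala.domain.{ChatRole, FunMessageSpec, FunctionSpec}
import io.cequence.openaiscala.service.OpenAIService

object ForcedToolCallSketch {
  def run(service: OpenAIService): Unit =
    service
      .createChatToolCompletion(
        messages = Seq(
          // Message constructor shape assumed for illustration.
          FunMessageSpec(role = ChatRole.User, content = Some("Weather in Prague, please"))
        ),
        tools = Seq(
          FunctionSpec(
            name = "get_current_weather",
            description = None, // field assumed
            parameters = Map("type" -> "object", "properties" -> Map("city" -> Map("type" -> "string")))
          )
        ),
        // Must match the name of a function listed in `tools`; None means the default "auto" mode.
        responseToolChoice = Some("get_current_weather")
      )
      .foreach(println) // response fields are not part of this diff, so just log the whole thing
}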
