From 7637b965a62a3a66f7157e6b1342376b518358af Mon Sep 17 00:00:00 2001
From: Peter Banda
Date: Tue, 26 Nov 2024 14:36:28 +0100
Subject: [PATCH] Deepseek provider / models with two examples

---
 build.sbt                                     |  2 +-
 .../openaiscala/domain/NonOpenAIModelId.scala |  5 +++
 .../service/ChatProviderSettings.scala        |  2 +
 .../nonopenai/ChatCompletionProvider.scala    | 10 +++++
 .../DeepseekCreateChatCompletion.scala        | 35 ++++++++++++++++
 ...DeepseekCreateChatCompletionStreamed.scala | 40 +++++++++++++++++++
 6 files changed, 93 insertions(+), 1 deletion(-)
 create mode 100644 openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/DeepseekCreateChatCompletion.scala
 create mode 100644 openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/DeepseekCreateChatCompletionStreamed.scala

diff --git a/build.sbt b/build.sbt
index 419412e0..db536a44 100755
--- a/build.sbt
+++ b/build.sbt
@@ -7,7 +7,7 @@ val scala3 = "3.2.2"
 ThisBuild / organization := "io.cequence"
 ThisBuild / scalaVersion := scala212
 
-ThisBuild / version := "1.1.1.RC.11"
+ThisBuild / version := "1.1.1.RC.17"
 ThisBuild / isSnapshot := false
 
 lazy val commonSettings = Seq(
diff --git a/openai-core/src/main/scala/io/cequence/openaiscala/domain/NonOpenAIModelId.scala b/openai-core/src/main/scala/io/cequence/openaiscala/domain/NonOpenAIModelId.scala
index 5346b703..e6e62214 100644
--- a/openai-core/src/main/scala/io/cequence/openaiscala/domain/NonOpenAIModelId.scala
+++ b/openai-core/src/main/scala/io/cequence/openaiscala/domain/NonOpenAIModelId.scala
@@ -202,4 +202,9 @@ object NonOpenAIModelId {
 
   // context 131072
   val grok_beta = "grok-beta"
+
+  // Deepseek
+  // context 64K, 4K (8K Beta)
+  val deepseek_chat = "deepseek-chat"
+  val deepseek_coder = "deepseek-coder"
 }
diff --git a/openai-core/src/main/scala/io/cequence/openaiscala/service/ChatProviderSettings.scala b/openai-core/src/main/scala/io/cequence/openaiscala/service/ChatProviderSettings.scala
index dbdd183e..ffb1b1dc 100644
--- a/openai-core/src/main/scala/io/cequence/openaiscala/service/ChatProviderSettings.scala
+++ b/openai-core/src/main/scala/io/cequence/openaiscala/service/ChatProviderSettings.scala
@@ -13,4 +13,6 @@ object ChatProviderSettings {
 
   val togetherAI = ProviderSettings("https://api.together.xyz/v1/", "TOGETHERAI_API_KEY")
   val grok = ProviderSettings("https://api.x.ai/v1/", "GROK_API_KEY")
+  val deepseek = ProviderSettings("https://api.deepseek.com/", "DEEPSEEK_API_KEY")
+  val deepseekBeta = ProviderSettings("https://api.deepseek.com/beta/", "DEEPSEEK_API_KEY")
 }
diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/ChatCompletionProvider.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/ChatCompletionProvider.scala
index 74617f86..99af1e36 100644
--- a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/ChatCompletionProvider.scala
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/ChatCompletionProvider.scala
@@ -90,6 +90,16 @@ object ChatCompletionProvider {
   ): OpenAIChatCompletionStreamedService =
     AnthropicServiceFactory.asOpenAI(withCache = withCache)
 
+  def deepseek(
+    implicit ec: ExecutionContext,
+    m: Materializer
+  ): OpenAIChatCompletionStreamedService = provide(ChatProviderSettings.deepseek)
+
+  def deepseekBeta(
+    implicit ec: ExecutionContext,
+    m: Materializer
+  ): OpenAIChatCompletionStreamedService = provide(ChatProviderSettings.deepseekBeta)
+
   private def provide(
     settings: ProviderSettings
   )(
diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/DeepseekCreateChatCompletion.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/DeepseekCreateChatCompletion.scala
new file mode 100644
index 00000000..eb4100e3
--- /dev/null
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/DeepseekCreateChatCompletion.scala
@@ -0,0 +1,35 @@
+package io.cequence.openaiscala.examples.nonopenai
+
+import io.cequence.openaiscala.domain._
+import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
+import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.service.OpenAIChatCompletionService
+
+import scala.concurrent.Future
+
+/**
+ * Requires `DEEPSEEK_API_KEY` environment variable to be set.
+ */
+object DeepseekCreateChatCompletion extends ExampleBase[OpenAIChatCompletionService] {
+
+  override val service: OpenAIChatCompletionService = ChatCompletionProvider.deepseek
+
+  private val messages = Seq(
+    SystemMessage("You are a helpful assistant."),
+    UserMessage("What is the weather like in Norway?")
+  )
+
+  private val modelId = NonOpenAIModelId.deepseek_chat
+
+  override protected def run: Future[_] =
+    service
+      .createChatCompletion(
+        messages = messages,
+        settings = CreateChatCompletionSettings(
+          model = modelId,
+          temperature = Some(0.1),
+          max_tokens = Some(1024)
+        )
+      )
+      .map(printMessageContent)
+}
diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/DeepseekCreateChatCompletionStreamed.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/DeepseekCreateChatCompletionStreamed.scala
new file mode 100644
index 00000000..adc0d698
--- /dev/null
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/DeepseekCreateChatCompletionStreamed.scala
@@ -0,0 +1,40 @@
+package io.cequence.openaiscala.examples.nonopenai
+
+import akka.stream.scaladsl.Sink
+import io.cequence.openaiscala.domain._
+import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
+import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.service.OpenAIChatCompletionStreamedServiceExtra
+
+import scala.concurrent.Future
+
+// requires `openai-scala-client-stream` as a dependency and `DEEPSEEK_API_KEY` environment variable to be set
+object DeepseekCreateChatCompletionStreamed
+    extends ExampleBase[OpenAIChatCompletionStreamedServiceExtra] {
+
+  override val service: OpenAIChatCompletionStreamedServiceExtra = ChatCompletionProvider.deepseekBeta
+
+  private val messages = Seq(
+    SystemMessage("You are a helpful assistant."),
+    UserMessage("What is the weather like in Norway?")
+  )
+
+  private val modelId = NonOpenAIModelId.deepseek_chat
+
+  override protected def run: Future[_] =
+    service
+      .createChatCompletionStreamed(
+        messages = messages,
+        settings = CreateChatCompletionSettings(
+          model = modelId,
+          temperature = Some(0.01),
+          max_tokens = Some(512)
+        )
+      )
+      .runWith(
+        Sink.foreach { completion =>
+          val content = completion.choices.headOption.flatMap(_.delta.content)
+          print(content.getOrElse(""))
+        }
+      )
+}
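
Note (not part of the patch): a minimal, self-contained sketch of how the pieces introduced above could be wired together from a downstream project, without the ExampleBase scaffolding used in the bundled examples. The object name DeepseekCoderSketch, the explicit Akka setup, and the response field access (choices -> message -> content) are illustrative assumptions rather than confirmed library API details; everything else (ChatCompletionProvider.deepseek, NonOpenAIModelId.deepseek_coder, CreateChatCompletionSettings) comes from the diff itself and requires DEEPSEEK_API_KEY to be set.

import akka.actor.ActorSystem
import akka.stream.Materializer
import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.examples.nonopenai.ChatCompletionProvider

import scala.concurrent.ExecutionContext

object DeepseekCoderSketch extends App {
  implicit val system: ActorSystem = ActorSystem("deepseek-sketch")
  implicit val ec: ExecutionContext = system.dispatcher
  implicit val materializer: Materializer = Materializer(system)

  // Non-beta endpoint (https://api.deepseek.com/); the API key is read from DEEPSEEK_API_KEY.
  private val service = ChatCompletionProvider.deepseek

  service
    .createChatCompletion(
      messages = Seq(
        SystemMessage("You are a helpful coding assistant."),
        UserMessage("Write a Scala function that reverses a string.")
      ),
      settings = CreateChatCompletionSettings(
        model = NonOpenAIModelId.deepseek_coder, // the second model id added by this patch
        temperature = Some(0.1),
        max_tokens = Some(512)
      )
    )
    .map { response =>
      // Assumed response shape: first choice -> assistant message -> content.
      println(response.choices.headOption.map(_.message.content).getOrElse(""))
    }
    .andThen { case _ => system.terminate() }
}

The beta variant (ChatCompletionProvider.deepseekBeta, pointing at https://api.deepseek.com/beta/) is exercised by the streamed example in the patch; the sketch above sticks to the plain chat completion path.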