Skip to content

Commit

Permalink
Deepseek provider / models with two examples
Browse files Browse the repository at this point in the history
  • Loading branch information
peterbanda committed Nov 26, 2024
1 parent 3893656 commit 7637b96
Show file tree
Hide file tree
Showing 6 changed files with 93 additions and 1 deletion.
2 changes: 1 addition & 1 deletion build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ val scala3 = "3.2.2"

ThisBuild / organization := "io.cequence"
ThisBuild / scalaVersion := scala212
// Single source of truth for the release version — keep exactly one assignment
// (an earlier duplicate `1.1.1.RC.11` line was removed; in sbt the last
// assignment silently wins, which hides such mistakes).
ThisBuild / version := "1.1.1.RC.17"
ThisBuild / isSnapshot := false

lazy val commonSettings = Seq(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -202,4 +202,9 @@ object NonOpenAIModelId {

// context 131072
val grok_beta = "grok-beta"

// Deepseek models.
// Context window: 64K tokens; max output: 4K tokens (8K on the beta endpoint).
val deepseek_chat = "deepseek-chat"
val deepseek_coder = "deepseek-coder"
}
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,6 @@ object ChatProviderSettings {
val togetherAI =
ProviderSettings("https://api.together.xyz/v1/", "TOGETHERAI_API_KEY")
val grok = ProviderSettings("https://api.x.ai/v1/", "GROK_API_KEY")
// Deepseek's OpenAI-compatible endpoints (stable vs. beta base URL); both
// resolve the API key from the DEEPSEEK_API_KEY environment variable.
val deepseek = ProviderSettings("https://api.deepseek.com/", "DEEPSEEK_API_KEY")
val deepseekBeta = ProviderSettings("https://api.deepseek.com/beta/", "DEEPSEEK_API_KEY")
}
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,16 @@ object ChatCompletionProvider {
): OpenAIChatCompletionStreamedService =
AnthropicServiceFactory.asOpenAI(withCache = withCache)

/**
 * Provides an OpenAI-compatible chat-completion service backed by Deepseek's
 * stable endpoint ([[ChatProviderSettings.deepseek]]).
 *
 * @param ec execution context for the service's async calls
 * @param m  stream materializer (the returned service supports streaming)
 * @return a streamed chat-completion service targeting Deepseek
 */
def deepseek(
implicit ec: ExecutionContext,
m: Materializer
): OpenAIChatCompletionStreamedService = provide(ChatProviderSettings.deepseek)

/**
 * Provides an OpenAI-compatible chat-completion service backed by Deepseek's
 * beta endpoint ([[ChatProviderSettings.deepseekBeta]]).
 *
 * @param ec execution context for the service's async calls
 * @param m  stream materializer (the returned service supports streaming)
 * @return a streamed chat-completion service targeting the Deepseek beta API
 */
def deepseekBeta(
implicit ec: ExecutionContext,
m: Materializer
): OpenAIChatCompletionStreamedService = provide(ChatProviderSettings.deepseekBeta)

private def provide(
settings: ProviderSettings
)(
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.examples.ExampleBase
import io.cequence.openaiscala.service.OpenAIChatCompletionService

import scala.concurrent.Future

/**
 * Example: one-shot (non-streamed) chat completion against the Deepseek
 * provider via the OpenAI-compatible API.
 *
 * Requires the `DEEPSEEK_API_KEY` environment variable to be set.
 */
object DeepseekCreateChatCompletion extends ExampleBase[OpenAIChatCompletionService] {

  override val service: OpenAIChatCompletionService = ChatCompletionProvider.deepseek

  // Deepseek's general-purpose chat model.
  private val model = NonOpenAIModelId.deepseek_chat

  // Minimal system + user prompt pair.
  private val prompt = Seq(
    SystemMessage("You are a helpful assistant."),
    UserMessage("What is the weather like in Norway?")
  )

  override protected def run: Future[_] = {
    val settings = CreateChatCompletionSettings(
      model = model,
      temperature = Some(0.1),
      max_tokens = Some(1024)
    )

    service
      .createChatCompletion(messages = prompt, settings = settings)
      .map(printMessageContent)
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
package io.cequence.openaiscala.examples.nonopenai

import akka.stream.scaladsl.Sink
import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.examples.ExampleBase
import io.cequence.openaiscala.service.OpenAIChatCompletionStreamedServiceExtra

import scala.concurrent.Future

/**
 * Example: streamed chat completion against the Deepseek (beta-endpoint)
 * provider, printing each delta chunk to stdout as it arrives.
 *
 * Requires `openai-scala-client-stream` as a dependency and the
 * `DEEPSEEK_API_KEY` environment variable to be set.
 */
object DeepseekCreateChatCompletionStreamed
    extends ExampleBase[OpenAIChatCompletionStreamedServiceExtra] {

  // The beta endpoint is used for the streamed variant.
  override val service: OpenAIChatCompletionStreamedServiceExtra =
    ChatCompletionProvider.deepseekBeta

  private val chatMessages = Seq(
    SystemMessage("You are a helpful assistant."),
    UserMessage("What is the weather like in Norway?")
  )

  override protected def run: Future[_] = {
    val source = service.createChatCompletionStreamed(
      messages = chatMessages,
      settings = CreateChatCompletionSettings(
        model = NonOpenAIModelId.deepseek_chat,
        temperature = Some(0.01),
        max_tokens = Some(512)
      )
    )

    // Print each streamed delta's content (if any) without a trailing newline.
    source.runWith(
      Sink.foreach { chunk =>
        print(chunk.choices.headOption.flatMap(_.delta.content).getOrElse(""))
      }
    )
  }
}

0 comments on commit 7637b96

Please sign in to comment.