Skip to content
This repository has been archived by the owner on Apr 18, 2024. It is now read-only.

Feat: Add support for Mistral AI and Azure OpenAI monitoring #6

Merged
merged 18 commits into from
Mar 17, 2024
Merged
Show file tree
Hide file tree
Changes from 12 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 21 additions & 3 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,14 @@ on:
branches: [ "main" ]
workflow_dispatch:
schedule:
- cron: '0 9 * * *'
- cron: '0 0 * * 0'

env:
OPENAI_API_TOKEN: ${{ secrets.OPENAI_API_TOKEN }}
COHERE_API_TOKEN: ${{ secrets.COHERE_API_TOKEN }}
ANTHROPIC_API_TOKEN: ${{ secrets.ANTHROPIC_API_TOKEN }}
DOKU_URL: ${{ secrets.DOKU_URL }}
DOKU_TOKEN: ${{ secrets.DOKU_TOKEN }}
MISTRAL_API_TOKEN: ${{ secrets.MISTRAL_API_TOKEN }}
DOKU_URL: http://127.0.0.1:9044

jobs:
build:
Expand All @@ -27,6 +27,24 @@ jobs:
steps:
- uses: actions/checkout@v4

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@0d103c3126aa41d772a8362f6aa67afac040f80c # v3.1.0

- name: Setup Doku Stack
run: docker-compose up -d

- name: Sleep for 30 seconds
run: sleep 30

- name: Make API Request and Set DOKU_TOKEN
run: |
RESPONSE=$(curl -X POST $DOKU_URL/api/keys \
-H 'Authorization: ""' \
-H 'Content-Type: application/json' \
-d '{"Name": "GITHUBACTION"}')
MESSAGE=$(echo $RESPONSE | jq -r '.message')
echo "DOKU_TOKEN=${MESSAGE}" >> $GITHUB_ENV

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
Expand Down
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
- ✅ OpenAI
- ✅ Anthropic
- ✅ Cohere
- ✅ Mistral

Deployed as the backbone for all your LLM monitoring needs, `dokumetry` channels crucial usage data directly to Doku, streamlining the tracking process. Unlock efficient and effective observability for your LLM applications with DokuMetry.

Expand Down
52 changes: 52 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
# Local Doku observability stack used by CI (.github/workflows/tests.yml):
# ClickHouse for storage, the Doku ingester API, and the Doku web client.
version: '3.8'  # NOTE: ignored by Compose v2, but required by legacy docker-compose v1

services:
  # Backing store for all Doku telemetry data.
  clickhouse:
    image: clickhouse/clickhouse-server:24.1.5
    container_name: clickhouse
    environment:
      # Credentials default to default/DOKU; override via env when needed.
      CLICKHOUSE_PASSWORD: ${DOKU_DB_PASSWORD:-DOKU}
      CLICKHOUSE_USER: ${DOKU_DB_USER:-default}
    volumes:
      - clickhouse-data:/var/lib/clickhouse
    ports:
      - "9000:9000"  # native TCP protocol (used by the ingester)
      - "8123:8123"  # HTTP interface (used by the client init)
    restart: always

  # Ingestion API; CI posts to http://127.0.0.1:9044 to mint an API key.
  doku-ingester:
    image: ghcr.io/dokulabs/doku-ingester:latest
    container_name: doku-ingester
    environment:
      DOKU_DB_HOST: clickhouse
      DOKU_DB_PORT: 9000
      DOKU_DB_NAME: ${DOKU_DB_NAME:-default}
      DOKU_DB_USER: ${DOKU_DB_USER:-default}
      DOKU_DB_PASSWORD: ${DOKU_DB_PASSWORD:-DOKU}
    ports:
      - "9044:9044"
    depends_on:
      - clickhouse  # NOTE: waits for container start only, not DB readiness
    restart: always

  # Web UI; keeps its own settings in a local SQLite file.
  doku-client:
    image: ghcr.io/dokulabs/doku-client:latest
    container_name: doku-client
    environment:
      INIT_DB_HOST: clickhouse
      INIT_DB_PORT: 8123
      INIT_DB_DATABASE: ${DOKU_DB_NAME:-default}
      INIT_DB_USERNAME: ${DOKU_DB_USER:-default}
      INIT_DB_PASSWORD: ${DOKU_DB_PASSWORD:-DOKU}
      SQLITE_DATABASE_URL: file:/app/client/data/data.db
    ports:
      - "3000:3000"
    depends_on:
      - clickhouse
    volumes:
      - doku-client-data:/app/client/data
    restart: always

volumes:
  clickhouse-data:
  doku-client-data:
22 changes: 19 additions & 3 deletions src/dokumetry/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,19 @@
__init__ module for dokumetry package.
"""
from anthropic import AsyncAnthropic, Anthropic

from openai import AsyncOpenAI, OpenAI
from openai import AsyncOpenAI, OpenAI, AzureOpenAI, AsyncAzureOpenAI
from mistralai.async_client import MistralAsyncClient
from mistralai.client import MistralClient

from .openai import init as init_openai
from .async_openai import init as init_async_openai
from .azure_openai import init as init_azure_openai
from .async_azure_openai import init as init_async_azure_openai
from .anthropic import init as init_anthropic
from .async_anthropic import init as init_async_anthropic
from .cohere import init as init_cohere
from .mistral import init as init_mistral
from .async_mistral import init as init_async_mistral

# pylint: disable=too-few-public-methods
class DokuConfig:
Expand Down Expand Up @@ -52,10 +57,21 @@
elif isinstance(llm, AsyncOpenAI):
init_async_openai(llm, doku_url, api_key, environment, application_name, skip_resp)
return
# pylint: disable=no-else-return
if hasattr(llm, 'moderations') and callable(llm.chat.completions.create) and ('.openai.azure.com/' in str(llm.base_url)):
Dismissed Show dismissed Hide dismissed
if isinstance(llm, AzureOpenAI):
init_openai(llm, doku_url, api_key, environment, application_name, skip_resp)
elif isinstance(llm, AsyncAzureOpenAI):
init_async_openai(llm, doku_url, api_key, environment, application_name, skip_resp)
return
elif hasattr(llm, 'generate') and callable(llm.generate):
init_cohere(llm, doku_url, api_key, environment, application_name, skip_resp)
return
elif hasattr(llm, 'chat') and callable(llm.chat):
if isinstance(llm, MistralClient):
init_mistral(llm, doku_url, api_key, environment, application_name, skip_resp)
elif isinstance(llm, MistralAsyncClient):
init_async_mistral(llm, doku_url, api_key, environment, application_name, skip_resp)
return
elif hasattr(llm, 'messages') and callable(llm.messages.create):
if isinstance(llm, AsyncAnthropic):
init_async_anthropic(llm, doku_url, api_key, environment, application_name, skip_resp)
Expand Down
Loading
Loading