AppUI.tsx
"use client";
import { useState } from "react";
import PromptInput from "@/components/PromptInput";
import ModelResponse from "@/components/ModelResponse";
import { ModelSelector } from "@/components/ModelSelector";
import { llmBattle, STREAMS } from "@/trigger/ai";
import { LLMModel, LLMModelEval } from "@/lib/schemas";
import { useRealtimeTaskTriggerWithStreams } from "@trigger.dev/react-hooks";
import EvalResponse from "./EvalResponse";
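// Client-side UI for the LLM battle: the user picks two models and a prompt,
// both model responses stream in realtime, and an eval verdict is shown once available.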
export default function AppUI({ triggerToken }: { triggerToken: string }) {
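  // Subscribe to the "llm-battle" task: the returned instance exposes submit(), the run
  // handle, isLoading, and realtime streams for model1, model2, and eval.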
const triggerInstance = useRealtimeTaskTriggerWithStreams<
typeof llmBattle,
STREAMS
>("llm-battle", {
accessToken: triggerToken,
baseURL: process.env.NEXT_PUBLIC_TRIGGER_API_URL,
});
const [selectedModels, setSelectedModels] = useState<
{ model1: LLMModel; model2: LLMModel } | undefined
>(undefined);
const isLoading = triggerInstance.isLoading;
const handleModelSelect = (model1: LLMModel, model2: LLMModel) => {
setSelectedModels({ model1, model2 });
};
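  // Trigger a new battle run with the prompt and the two currently selected models.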
const handleSubmit = async (inputPrompt: string) => {
if (!selectedModels) {
return;
}
triggerInstance.submit({
prompt: inputPrompt,
...selectedModels,
});
};
  // Keep the loading state active until both model streams have emitted a "step-finish" part,
  // not just until the trigger request itself resolves.
const model1Finished = triggerInstance.streams.model1?.find(
(part) => part.type === "step-finish"
);
const model2Finished = triggerInstance.streams.model2?.find(
(part) => part.type === "step-finish"
);
const $isLoading =
isLoading ||
(!!triggerInstance.handle && (!model1Finished || !model2Finished));
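  // Validate the most recent part of the eval stream against the LLMModelEval schema;
  // the evaluation is only used once it parses successfully.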
const parsedEval = triggerInstance.streams.eval
? LLMModelEval.safeParse(
triggerInstance.streams.eval[triggerInstance.streams.eval.length - 1]
)
: undefined;
const evaluation = parsedEval?.success ? parsedEval.data : undefined;
return (
<div className="min-h-screen bg-gray-100 py-8 px-4 sm:px-6 lg:px-8">
<div className="max-w-6xl mx-auto">
<h1 className="text-3xl font-bold text-center mb-2">
Trigger.dev LLM Battle
</h1>
        <h2 className="text-lg text-center mb-8">
Realtime Streams + Trigger React hooks
</h2>
<div className="space-y-6">
<ModelSelector onSelect={handleModelSelect} />
<PromptInput onSubmit={handleSubmit} isLoading={$isLoading} />
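          {/* Render both model responses side by side; once an evaluation arrives, mark the higher-scoring model as the winner. */}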
<div className="grid gap-6 md:grid-cols-2">
{selectedModels && (
<>
<ModelResponse
key={"model1"}
model={selectedModels.model1}
stream={triggerInstance.streams.model1 ?? []}
isWinner={
evaluation
? evaluation.model1.score > evaluation.model2.score
: undefined
}
score={evaluation?.model1.score}
/>
<ModelResponse
key={"model2"}
model={selectedModels.model2}
stream={triggerInstance.streams.model2 ?? []}
isWinner={
evaluation
? evaluation.model2.score > evaluation.model1.score
: undefined
}
score={evaluation?.model2.score}
/>
</>
)}
</div>
{evaluation && (
<EvalResponse isLoading={$isLoading} evaluation={evaluation} />
)}
</div>
</div>
</div>
);
}