Add "Stop Response" feature (#18)
Shortcut: cmd+shift+opt+/

Works both in AI Chat and useGPT.

Also contains a few bug fixes.
XInTheDark authored May 8, 2024
1 parent 9ba0cdd commit af04c55
Showing 2 changed files with 84 additions and 23 deletions.
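
At its core, the change is a cooperative-cancellation pattern: a module-level status flag, a getter closure handed to the chunk processor, and a "Stop Response" action that flips the flag while a response is streaming. The following is a minimal, self-contained sketch of that pattern, not part of the commit; fakeStream and stoppableChunks are illustrative stand-in names.

// Minimal sketch (not part of the commit): cooperative cancellation of a streamed response.
let generationStatus = { stop: false, loading: false };
const get_status = () => generationStatus.stop;

// Stand-in for a provider stream; purely illustrative.
async function* fakeStream() {
  for (let i = 0; i < 1000; i++) yield `chunk ${i} `;
}

// Yields chunks until the stream ends or the status getter reports a stop request.
async function* stoppableChunks(stream, status = null) {
  let i = 0;
  for await (const chunk of stream) {
    if ((i & 15) === 0 && status && status()) break; // poll roughly every 16 chunks
    yield chunk;
    i++;
  }
}

// A "Stop Response" action only needs to flip the flag:
// onAction: () => { generationStatus = { stop: true, loading: false }; }

The diffs below wire this pattern into Raycast's ActionPanel and the existing chunk-processing loop in both commands.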
40 changes: 39 additions & 1 deletion src/aiChat.jsx
@@ -36,6 +36,9 @@ const ChatProvidersReact = chat_providers.map((x) => {
return <Form.Dropdown.Item title={x[0]} value={x[1]} key={x[1]} />;
});

let generationStatus = { stop: false, loading: false };
let get_status = () => generationStatus.stop;

export default function Chat({ launchContext }) {
let toast = async (style, title, message) => {
return await showToast({
@@ -106,8 +109,9 @@ export default function Chat({ launchContext }) {
let response = "";
let r = await getChatResponse(currentChat, query);
let loadingToast = await toast(Toast.Style.Animated, "Response Loading");
generationStatus = { stop: false, loading: true };

for await (const chunk of await processChunks(r, provider)) {
for await (const chunk of await processChunks(r, provider, get_status)) {
response += chunk;
response = formatResponse(response, provider);
_setChatData(chatData, setChatData, messageID, "", response);
@@ -127,6 +131,7 @@ export default function Chat({ launchContext }) {
`${chars} chars (${charPerSec} / sec) | ${elapsed.toFixed(1)} sec`
);

generationStatus.loading = false;
pruneChats(chatData, setChatData); // this function effectively only runs periodically
};

@@ -491,6 +496,16 @@ export default function Chat({ launchContext }) {
/>
<Action.Push icon={Icon.BlankDocument} title="Compose Message" target={<ComposeMessage />} />
<ActionPanel.Section title="Current Chat">
{generationStatus.loading && (
<Action
title="Stop Response"
icon={Icon.Pause}
onAction={() => {
generationStatus = { stop: true, loading: false };
}}
shortcut={{ modifiers: ["cmd", "shift", "opt"], key: "/" }}
/>
)}
<Action
icon={Icon.ArrowClockwise}
title="Regenerate Last Message"
@@ -502,6 +517,29 @@ export default function Chat({ launchContext }) {
return;
}

if (chat.messages[0].finished === false) {
// We don't prevent the user from regenerating a message that is still loading,
// because there are valid use cases, such as when the extension glitches, but we show an alert.
let userConfirmed = false;
await confirmAlert({
title: "Are you sure?",
message: "Response is still loading. Are you sure you want to regenerate it?",
icon: Icon.ArrowClockwise,
primaryAction: {
title: "Regenerate Message",
onAction: () => {
userConfirmed = true;
},
},
dismissAction: {
title: "Cancel",
},
});
if (!userConfirmed) {
return;
}
}

await toast(Toast.Style.Animated, "Regenerating Last Message");

// We first remove the last message, then insert a null (default) message.
67 changes: 45 additions & 22 deletions src/api/gpt.jsx
@@ -57,6 +57,9 @@ export const is_null_message = (message) => {
return !message || ((message?.prompt || "").length === 0 && (message?.answer || "").length === 0);
};

let generationStatus = { stop: false };
let get_status = () => generationStatus.stop;

export default (
props,
{
@@ -150,8 +153,9 @@ export default (
} else {
let r = await chatCompletion(messages, options);
let loadingToast = await showToast(Toast.Style.Animated, "Response Loading");
generationStatus.stop = false;

for await (const chunk of await processChunks(r, provider)) {
for await (const chunk of await processChunks(r, provider, get_status)) {
response += chunk;
response = formatResponse(response, provider);
setMarkdown(response);
@@ -278,26 +282,34 @@ export default (
return page === Pages.Detail ? (
<Detail
actions={
!isLoading && (
<ActionPanel>
{allowPaste && <Action.Paste content={markdown} />}
<Action.CopyToClipboard shortcut={Keyboard.Shortcut.Common.Copy} content={markdown} />
{lastQuery && lastResponse && (
<Action
title="Continue in Chat"
icon={Icon.Message}
shortcut={{ modifiers: ["cmd"], key: "j" }}
onAction={async () => {
await launchCommand({
name: "aiChat",
type: LaunchType.UserInitiated,
context: { query: lastQuery, response: lastResponse, creationName: "" },
});
}}
/>
)}
</ActionPanel>
)
<ActionPanel>
{allowPaste && <Action.Paste content={markdown} />}
<Action.CopyToClipboard shortcut={Keyboard.Shortcut.Common.Copy} content={markdown} />
{lastQuery && (
<Action
title="Continue in Chat"
icon={Icon.Message}
shortcut={{ modifiers: ["cmd"], key: "j" }}
onAction={async () => {
await launchCommand({
name: "aiChat",
type: LaunchType.UserInitiated,
context: { query: lastQuery, response: lastResponse, creationName: "" },
});
}}
/>
)}
{isLoading && (
<Action
title="Stop Response"
icon={Icon.Pause}
onAction={() => {
generationStatus.stop = true;
}}
shortcut={{ modifiers: ["cmd", "shift", "opt"], key: "/" }}
/>
)}
</ActionPanel>
}
isLoading={isLoading}
markdown={markdown}
@@ -430,7 +442,7 @@ export const formatResponse = (response, provider) => {
};

// Returns an async generator that can be used directly.
export const processChunks = async function* (response, provider) {
export const processChunksAsync = async function* (response, provider) {
if (provider === g4f.providers.Bing) {
let prevChunk = "";
// For Bing, we must not return the last chunk
@@ -446,3 +458,14 @@
yield* G4F.chunkProcessor(response);
}
};

export const processChunks = async function* (response, provider, status = null) {
// same as processChunksAsync, but stops generating as soon as status() is true
// update every few chunks to reduce performance impact
let i = 0;
for await (const chunk of await processChunksAsync(response, provider)) {
if ((i & 15) === 0 && status && status()) break;
yield chunk;
i++;
}
};
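
Note that the status callback is only polled on every 16th chunk ((i & 15) === 0), so a stop request can let a handful of additional chunks through before the generator returns. A hedged sketch of how a caller might drive it, mirroring the loop in aiChat.jsx above; r, provider, get_status, and generationStatus are assumed to come from the surrounding code:

// Illustrative caller, mirroring aiChat.jsx above (not part of the commit).
async function streamResponse(r, provider) {
  generationStatus.stop = false;
  let response = "";
  for await (const chunk of await processChunks(r, provider, get_status)) {
    response += chunk; // accumulate until the stream ends or get_status() returns true
  }
  return response;
}
// A "Stop Response" action elsewhere sets generationStatus.stop = true,
// which ends the loop after at most a few more chunks, since the flag is checked every 16th chunk.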
