Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: support stop and reload #2

Merged
merged 1 commit into from
Nov 20, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
feat: support stop and reload
  • Loading branch information
himself65 committed Nov 20, 2023
commit 0ba1cde3a517f1c2265f582b857cc43ced1eaa71
3 changes: 3 additions & 0 deletions examples/llamaindex-straming/.eslintrc.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{
"extends": "next/core-web-vitals"
}
35 changes: 35 additions & 0 deletions examples/llamaindex-straming/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# local env files
.env*.local

# vercel
.vercel

# typescript
*.tsbuildinfo
next-env.d.ts
7 changes: 7 additions & 0 deletions examples/llamaindex-straming/app/api/chat/engine/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
import { LLM, SimpleChatEngine } from "llamaindex";

/**
 * Builds the chat engine used by the chat API route.
 *
 * @param llm - the language model the engine delegates completions to
 * @returns a SimpleChatEngine wrapping the given LLM (no retrieval/index)
 */
export async function createChatEngine(llm: LLM) {
  const engine = new SimpleChatEngine({ llm });
  return engine;
}
35 changes: 35 additions & 0 deletions examples/llamaindex-straming/app/api/chat/llamaindex-stream.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import {
createCallbacksTransformer,
createStreamDataTransformer,
trimStartOfStreamHelper,
type AIStreamCallbacksAndOptions,
} from "ai";

/**
 * Adapts an async generator of text chunks into a ReadableStream<string>.
 * Pull-based: each `pull` advances the generator exactly once, closing the
 * stream when the generator is exhausted. Leading whitespace of the overall
 * stream is trimmed via the `ai` package helper.
 */
function createParser(res: AsyncGenerator<any>) {
  const trimStartOfStream = trimStartOfStreamHelper();
  return new ReadableStream<string>({
    async pull(controller): Promise<void> {
      const next = await res.next();
      if (next.done) {
        controller.close();
        return;
      }
      // Empty chunks are dropped so consumers only see meaningful text.
      const chunk = trimStartOfStream(next.value ?? "");
      if (chunk) {
        controller.enqueue(chunk);
      }
    },
  });
}

/**
 * Converts a LlamaIndex chat response generator into a stream consumable by
 * the Vercel AI SDK (suitable for StreamingTextResponse).
 *
 * @param res - async generator yielding response text chunks
 * @param callbacks - optional AI SDK lifecycle callbacks / stream-data flag
 */
export function LlamaIndexStream(
  res: AsyncGenerator<any>,
  callbacks?: AIStreamCallbacksAndOptions,
): ReadableStream {
  const source = createParser(res);
  return source
    .pipeThrough(createCallbacksTransformer(callbacks))
    .pipeThrough(
      createStreamDataTransformer(callbacks?.experimental_streamData),
    );
}
49 changes: 49 additions & 0 deletions examples/llamaindex-straming/app/api/chat/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
import { Message, StreamingTextResponse } from "ai";
import { OpenAI } from "llamaindex";
import { NextRequest, NextResponse } from "next/server";
import { createChatEngine } from "./engine";
import { LlamaIndexStream } from "./llamaindex-stream";

export const runtime = "nodejs";
export const dynamic = "force-dynamic";

/**
 * Chat API route: streams an LLM reply to the latest user message.
 *
 * Expects a JSON body of `{ messages: Message[] }` where the last message is
 * from the user. Returns a streaming text response on success, 400 on invalid
 * input, 500 on unexpected errors.
 */
export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const { messages }: { messages: Message[] } = body;
    // Validate BEFORE touching the array: the previous version called
    // `messages.pop()` first, so a missing/empty `messages` threw a
    // TypeError and surfaced as a 500 instead of the intended 400.
    const lastMessage =
      Array.isArray(messages) && messages.length > 0
        ? messages[messages.length - 1]
        : undefined;
    if (!lastMessage || lastMessage.role !== "user") {
      return NextResponse.json(
        {
          error:
            "messages are required in the request body and the last message must be from the user",
        },
        { status: 400 },
      );
    }
    // History excludes the last (user) message; slice avoids mutating the
    // caller-supplied array, unlike the previous `pop()`.
    const history = messages.slice(0, -1);

    const llm = new OpenAI({
      model: "gpt-3.5-turbo",
    });

    const chatEngine = await createChatEngine(llm);

    // `true` requests a streaming (async generator) response.
    const response = await chatEngine.chat(lastMessage.content, history, true);

    // Transform the response into a readable stream
    const stream = LlamaIndexStream(response);

    // Return a StreamingTextResponse, which can be consumed by the client
    return new StreamingTextResponse(stream);
  } catch (error) {
    console.error("[LlamaIndex]", error);
    return NextResponse.json(
      {
        error: (error as Error).message,
      },
      {
        status: 500,
      },
    );
  }
}
40 changes: 40 additions & 0 deletions examples/llamaindex-straming/app/components/chat-section.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
'use client'

import { chatAtoms } from 'jotai-ai'
import { ChatInput, ChatMessages } from './ui/chat'
import { useAtom, useAtomValue, useSetAtom } from 'jotai/react'

// Create the chat atoms once at module scope so every render shares state.
const {
  messagesAtom,
  inputAtom,
  submitAtom,
  isLoadingAtom,
  reloadAtom,
  stopAtom,
} = chatAtoms();

/**
 * Top-level chat UI: message list plus input box, wired to jotai-ai atoms.
 * Passes `stop`/`reload` down so ChatMessages can render the action buttons.
 */
export default function ChatSection() {
  const messages = useAtomValue(messagesAtom);
  const [input, handleInputChange] = useAtom(inputAtom);
  const handleSubmit = useSetAtom(submitAtom);
  const isLoading = useAtomValue(isLoadingAtom);
  const reload = useSetAtom(reloadAtom);
  const stop = useSetAtom(stopAtom);

  return (
    <div className="space-y-4 max-w-5xl w-full">
      <ChatMessages
        messages={messages}
        isLoading={isLoading}
        reload={reload}
        stop={stop}
      />
      <ChatInput
        input={input}
        handleSubmit={handleSubmit}
        handleInputChange={handleInputChange}
        isLoading={isLoading}
      />
    </div>
  );
}
28 changes: 28 additions & 0 deletions examples/llamaindex-straming/app/components/header.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import Image from "next/image";

/**
 * Page header: "Get started" banner plus the LlamaIndex attribution link.
 * Class strings are hoisted to locals for readability; they are byte-for-byte
 * identical to the rendered Tailwind classes.
 */
export default function Header() {
  const bannerClasses =
    "fixed left-0 top-0 flex w-full justify-center border-b border-gray-300 bg-gradient-to-b from-zinc-200 pb-6 pt-8 backdrop-blur-2xl dark:border-neutral-800 dark:bg-zinc-800/30 dark:from-inherit lg:static lg:w-auto lg:rounded-xl lg:border lg:bg-gray-200 lg:p-4 lg:dark:bg-zinc-800/30";
  const attributionWrapperClasses =
    "fixed bottom-0 left-0 flex h-48 w-full items-end justify-center bg-gradient-to-t from-white via-white dark:from-black dark:via-black lg:static lg:h-auto lg:w-auto lg:bg-none";

  return (
    <div className="z-10 max-w-5xl w-full items-center justify-between font-mono text-sm lg:flex">
      <p className={bannerClasses}>
        Get started by editing&nbsp;
        <code className="font-mono font-bold">app/page.tsx</code>
      </p>
      <div className={attributionWrapperClasses}>
        <a
          href="https://www.llamaindex.ai/"
          className="flex items-center justify-center font-nunito text-lg font-bold gap-2"
        >
          <span>Built by LlamaIndex</span>
          <Image
            className="rounded-xl"
            src="/llama.png"
            alt="Llama Logo"
            width={40}
            height={40}
            priority
          />
        </a>
      </div>
    </div>
  );
}
1 change: 1 addition & 0 deletions examples/llamaindex-straming/app/components/ui/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Using the chat component from https://github.com/marcusschiesser/ui (based on https://ui.shadcn.com/)
56 changes: 56 additions & 0 deletions examples/llamaindex-straming/app/components/ui/button.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
import { Slot } from "@radix-ui/react-slot";
import { cva, type VariantProps } from "class-variance-authority";
import * as React from "react";

import { cn } from "./lib/utils";

// Tailwind class variants for the shared Button component (shadcn/ui style).
// The first argument is the base class list applied to every button; the
// `variants` map adds per-variant/per-size classes on top of it.
const buttonVariants = cva(
  "inline-flex items-center justify-center whitespace-nowrap rounded-md text-sm font-medium ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50",
  {
    variants: {
      variant: {
        default: "bg-primary text-primary-foreground hover:bg-primary/90",
        destructive:
          "bg-destructive text-destructive-foreground hover:bg-destructive/90",
        outline:
          "border border-input bg-background hover:bg-accent hover:text-accent-foreground",
        secondary:
          "bg-secondary text-secondary-foreground hover:bg-secondary/80",
        ghost: "hover:bg-accent hover:text-accent-foreground",
        link: "text-primary underline-offset-4 hover:underline",
      },
      size: {
        default: "h-10 px-4 py-2",
        sm: "h-9 rounded-md px-3",
        lg: "h-11 rounded-md px-8",
        icon: "h-10 w-10",
      },
    },
    // Used when a caller omits `variant`/`size`.
    defaultVariants: {
      variant: "default",
      size: "default",
    },
  },
);

// Standard <button> props plus the cva variant/size props.
// `asChild` renders the child element (via Radix Slot) instead of a <button>,
// letting e.g. an <a> pick up button styling and behavior.
export interface ButtonProps
  extends React.ButtonHTMLAttributes<HTMLButtonElement>,
    VariantProps<typeof buttonVariants> {
  asChild?: boolean;
}

// Polymorphic button: merges variant classes with any caller-supplied
// className (caller classes win via cn/tailwind-merge) and forwards the ref.
const Button = React.forwardRef<HTMLButtonElement, ButtonProps>(
  ({ className, variant, size, asChild = false, ...props }, ref) => {
    const Comp = asChild ? Slot : "button";
    return (
      <Comp
        className={cn(buttonVariants({ variant, size, className }))}
        ref={ref}
        {...props}
      />
    );
  },
);
Button.displayName = "Button";

export { Button, buttonVariants };
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import { PauseCircle, RefreshCw } from "lucide-react";

import { Button } from "../button";
import { ChatHandler } from "./chat.interface";

export default function ChatActions(
props: Pick<ChatHandler, "stop" | "reload"> & {
showReload?: boolean;
showStop?: boolean;
},
) {
return (
<div className="space-x-4">
{props.showStop && (
<Button variant="outline" size="sm" onClick={props.stop}>
<PauseCircle className="mr-2 h-4 w-4" />
Stop generating
</Button>
)}
{props.showReload && (
<Button variant="outline" size="sm" onClick={props.reload}>
<RefreshCw className="mr-2 h-4 w-4" />
Regenerate
</Button>
)}
</div>
);
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import { User2 } from "lucide-react";
import Image from "next/image";

/**
 * Avatar for a chat message: a user icon for `role === "user"`, the llama
 * logo for every other role (i.e. assistant/system messages).
 */
export default function ChatAvatar({ role }: { role: string }) {
  const isUser = role === "user";

  if (!isUser) {
    return (
      <div className="flex h-8 w-8 shrink-0 select-none items-center justify-center rounded-md border bg-black text-white shadow">
        <Image
          className="rounded-md"
          src="/llama.png"
          alt="Llama Logo"
          width={24}
          height={24}
          priority
        />
      </div>
    );
  }

  return (
    <div className="flex h-8 w-8 shrink-0 select-none items-center justify-center rounded-md border bg-background shadow">
      <User2 className="h-4 w-4" />
    </div>
  );
}
29 changes: 29 additions & 0 deletions examples/llamaindex-straming/app/components/ui/chat/chat-input.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
import { Button } from "../button";
import { Input } from "../input";
import { ChatHandler } from "./chat.interface";

/**
 * Message composer: a controlled text input plus a submit button.
 * The submit button is disabled while a response is streaming.
 */
export default function ChatInput(
  props: Pick<
    ChatHandler,
    "isLoading" | "handleSubmit" | "handleInputChange" | "input"
  >,
) {
  const { isLoading, handleSubmit, handleInputChange, input } = props;

  return (
    <form
      onSubmit={handleSubmit}
      className="flex w-full items-start justify-between gap-4 rounded-xl bg-white p-4 shadow-xl"
    >
      <Input
        autoFocus
        name="message"
        placeholder="Type a message"
        className="flex-1"
        value={input}
        onChange={handleInputChange}
      />
      <Button type="submit" disabled={isLoading}>
        Send message
      </Button>
    </form>
  );
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import { Check, Copy } from "lucide-react";

import { Button } from "../button";
import ChatAvatar from "./chat-avatar";
import { Message } from "./chat.interface";
import Markdown from "./markdown";
import { useCopyToClipboard } from "./use-copy-to-clipboard";

/**
 * A single chat message: avatar, markdown-rendered content, and a
 * copy-to-clipboard button revealed on hover. The copy icon flips to a check
 * mark for 2 seconds after a successful copy.
 */
export default function ChatMessage(chatMessage: Message) {
  const { isCopied, copyToClipboard } = useCopyToClipboard({ timeout: 2000 });
  // Swap the icon component rather than branching inside the JSX.
  const CopyStateIcon = isCopied ? Check : Copy;

  return (
    <div className="flex items-start gap-4 pr-5 pt-5">
      <ChatAvatar role={chatMessage.role} />
      <div className="group flex flex-1 justify-between gap-2">
        <div className="flex-1">
          <Markdown content={chatMessage.content} />
        </div>
        <Button
          onClick={() => copyToClipboard(chatMessage.content)}
          size="icon"
          variant="ghost"
          className="h-8 w-8 opacity-0 group-hover:opacity-100"
        >
          <CopyStateIcon className="h-4 w-4" />
        </Button>
      </div>
    </div>
  );
}
Loading