This is a Preview Build for Next Week's Release
I'm releasing this to gather some feedback about the Chat component before I push the main release at the beginning of March. Please let me know below what you think of the new chat
component. What do you love? What needs to change? What should be added? Please leave your ideas below! 🙏
Download the pre-release here:
https://github.com/johnlindquist/kitapp/releases/tag/v1.50.6
Join our Discord:
Chat Component Hello World
Open chat-hello-world in Script Kit
// Name: Chat Hello Worldimport "@johnlindquist/kit"let messages = await chat({onSubmit: input => {chat.addMessage(`You said, "${input}"`)},})inspect(messages)
Chat Component with AI
Open openai-chat in Script Kit
// Name: OpenAI Chatimport "@johnlindquist/kit"await npm("openai")await npm("langchain")let { OpenAI } = await import("langchain")let { ConversationChain } = await import("langchain/chains")let { BufferMemory } = await import("langchain/memory")let llm = new OpenAI({openAIApiKey: await env("OPENAI_API_KEY", {hint: `Grab a key from <a href="https://platform.openai.com/account/api-keys">here</a>`,}),streaming: true,callbackManager: {handleStart: () => {chat.addMessage("")},handleNewToken: token => {chat.pushToken(token)},},})let memory = new BufferMemory()let chain = new ConversationChain({llm,memory,})let messages = await chat({ignoreBlur: true,alwaysOnTop: true,onSubmit: async input => {await chain.call({ input })},})inspect(messages)
Chat with a .txt File
/*# Doc TalkChat with a .txt file such as a book. In chat, ask questions like:- "Who are the main characters?"- "Where are the key settings"?- "Summarize the story arc"> Note: The chat's knowledge is limited to the loaded .txt file*/// Name: Doc Talkimport "@johnlindquist/kit"await npm("openai")await npm("langchain")await npm("hnswlib-node")let { OpenAI } = await import("langchain/llms")let { VectorDBQAChain } = await import("langchain/chains")let { HNSWLib } = await import("langchain/vectorstores")let { OpenAIEmbeddings } = await import("langchain/embeddings")let { RecursiveCharacterTextSplitter } = await import("langchain/text_splitter")let filePath = await path({hint: `Select a .txt file to talk to or <a href="submit:__ROMEO__">Download Romeo and Juliet</a>`,})if (filePath === "__ROMEO__") {let buffer = await download(`https://www.gutenberg.org/cache/epub/1513/pg1513.txt`)filePath = home("Downloads", "romeo-and-juliet.txt")await writeFile(filePath, buffer)}wait(250).then(() => setLoading(true))div(md(`# Loading...~~~Loading in ${filePath}~~~`))let text = await readFile(filePath, "utf-8")const model = new OpenAI({streaming: true,callbackManager: {handleStart: () => {chat.addMessage("")},handleNewToken: token => {chat.pushToken(token)},},})const textSplitter = new RecursiveCharacterTextSplitter({chunkSize: 1000,})const docs = textSplitter.createDocuments([text])const vectorStore = await HNSWLib.fromDocuments(docs,new OpenAIEmbeddings())const chain = VectorDBQAChain.fromLLM(model, vectorStore)setLoading(false)let messages = await chat({onSubmit: async query => {const res = await chain.call({input_documents: docs,query,})},})inspect(messages)
Google AI
/*# Google AI ExperimentThis is 100% experimental. I'm still learning the ins and outs of langchain.If you have feedback on how to improve, PLEASE share 🙏\- John Lindquist*/// Name: Google AIimport "@johnlindquist/kit"await npm("openai")await npm("langchain")// Note: This lib is updated fairly frequently to keep up with Google's changes: https://www.npmjs.com/package/googlethisawait npm("googlethis")await npm("@extractus/article-extractor")let { OpenAI } = await import("langchain")let { Tool } = await import("langchain/tools")let { initializeAgentExecutor } = await import("langchain/agents")class GoogleThis extends Tool {name = "search"description ="a search engine. useful for when you need to answer questions about current events. input should be a search query. Output should include the best result and associated URL."formatResults = response => {let data = response?.results?.slice(0, 3)?.map(r => {return `title: ${r.title}description: ${r.description}url: ${r.url}`}).join("\n")if (response?.knowledge_panel?.title)data = `Best title: ${response?.knowledge_panel?.title}${data}`if (response?.knowledge_panel?.description)data = `Best description: ${response?.knowledge_panel?.description}${data}`return data}async call(input: string) {let google = await import("googlethis")let response = await google.search(input)return this.formatResults(response)}}class ReadURL extends Tool {name = "read"description = `a web scraper. Input is a url. 
Output is the contents of the page.`formatArticle = (url, article) => {let formatted = ``try {formatted = Object.entries(article).filter(([key, value]) => key && value).map(([key, value]) => {// In case the article contents are too long// TODO: Should probably wrap over to a "Document" paradigm here...if (typeof value === "string") {return [key, value?.slice(0, 1000) || ""]}if (Array.isArray(value)) {return [key, value.join(",")]}return [key, value]}).map(([key, value]) => `${key}: ${value}`).join("\n")} catch (error) {formatted = `Couldn't read the contents of ${url}`}return formatted}async call(url: string) {let { extract } = await import("@extractus/article-extractor")let article = await extract(url)let formatted = this.formatArticle(url, article)return formatted}}let tools = [new GoogleThis(), new ReadURL()]let yankAnswer = async output => {return output?.generations?.at(0)?.at(0)?.text.split("\n")?.at(-1).replace("Final Answer: ", "")}let llm = new OpenAI({temperature: 0.7,streaming: true,callbackManager: {handleError: log,handleEnd: output => {let answer = yankAnswer(output)history = `${history}AI: ${answer}`},handleStart: () => {chat.addMessage("")},handleNewToken: token => {chat.pushToken(token)},},})let executor = await initializeAgentExecutor(tools,llm,"zero-shot-react-description")// TODO: I'm sure this can be _vastly_ improvedlet history = `The AI should always include relevant URLs when possible.The AI should use the given information and URLs to explain why it chose the answer.The AI should avoid commas by formatting with newlines`let messages = await chat({ignoreBlur: true,alwaysOnTop: true,onSubmit: async input => {history = `${history}Me: ${input}`await executor.call({ input: history })},})inspect(messages)