diff --git a/web/src/components/assistant.tsx b/web/src/components/assistant.tsx
index ad5b83a..510b12a 100644
--- a/web/src/components/assistant.tsx
+++ b/web/src/components/assistant.tsx
@@ -1,12 +1,14 @@
-import {useState} from 'react'
-import {ShowCitation, CitationsBlock} from './citations'
-import {GlossarySpan} from './glossary'
-import type {Citation, AssistantEntry as AssistantType} from '../types'
+import { useState } from "react";
+import { ShowCitation, CitationsBlock } from "./citations";
+import { GlossarySpan } from "./glossary";
+import type { Citation, AssistantEntry as AssistantType } from "../types";

-export const AssistantEntry: React.FC<{entry: AssistantType}> = ({entry}) => {
+export const AssistantEntry: React.FC<{ entry: AssistantType }> = ({
+  entry,
+}) => {
   return (
- {entry.content.split('\n').map((paragraph, i) => ( + {entry.content.split("\n").map((paragraph, i) => ( = ({entry}) => { }
- ) -} + ); +}; diff --git a/web/src/components/chat.tsx b/web/src/components/chat.tsx index b54d265..1987728 100644 --- a/web/src/components/chat.tsx +++ b/web/src/components/chat.tsx @@ -1,5 +1,5 @@ -import {useState, useEffect} from 'react' -import {queryLLM, getStampyContent, runSearch} from '../hooks/useSearch' +import { useState, useEffect } from "react"; +import { queryLLM, getStampyContent, runSearch } from "../hooks/useSearch"; import type { CurrentSearch, @@ -8,102 +8,111 @@ import type { AssistantEntry as AssistantEntryType, LLMSettings, Followup, -} from '../types' -import {SearchBox} from '../components/searchbox' -import {AssistantEntry} from '../components/assistant' -import {Entry as EntryTag} from '../components/entry' +} from "../types"; +import { SearchBox } from "../components/searchbox"; +import { AssistantEntry } from "../components/assistant"; +import { Entry as EntryTag } from "../components/entry"; -const MAX_FOLLOWUPS = 4 +const MAX_FOLLOWUPS = 4; type State = | { - state: 'idle' + state: "idle"; } | { - state: 'loading' - phase: 'semantic' | 'prompt' | 'llm' - citations: Citation[] + state: "loading"; + phase: "semantic" | "prompt" | "llm"; + citations: Citation[]; } | { - state: 'streaming' - response: AssistantEntryType - } + state: "streaming"; + response: AssistantEntryType; + }; // smooth-scroll to the bottom of the window if we're already less than 30% a screen away // note: finicky interaction with "smooth" - maybe fix later. function scroll30() { - if (document.documentElement.scrollHeight - window.scrollY > window.innerHeight * 1.3) return - window.scrollTo({top: document.body.scrollHeight, behavior: 'smooth'}) + if ( + document.documentElement.scrollHeight - window.scrollY > + window.innerHeight * 1.3 + ) + return; + window.scrollTo({ top: document.body.scrollHeight, behavior: "smooth" }); } type ChatParams = { - sessionId: string - settings: LLMSettings - onQuery?: (q: string) => any - onNewEntry?: (history: Entry[]) => any -} + sessionId: string; + settings: LLMSettings; + onQuery?: (q: string) => any; + onNewEntry?: (history: Entry[]) => any; +}; -const Chat = ({sessionId, settings, onQuery, onNewEntry}: ChatParams) => { - const [entries, setEntries] = useState([]) - const [current, setCurrent] = useState() - const [citations, setCitations] = useState([]) +const Chat = ({ sessionId, settings, onQuery, onNewEntry }: ChatParams) => { + const [entries, setEntries] = useState([]); + const [current, setCurrent] = useState(); + const [citations, setCitations] = useState([]); const updateCurrent = (current: CurrentSearch) => { - setCurrent(current) - if (current?.phase === 'streaming') { - scroll30() + setCurrent(current); + if (current?.phase === "streaming") { + scroll30(); } - } + }; - const updateCitations = (allCitations: Citation[], current?: CurrentSearch) => { - if (!current) return + const updateCitations = ( + allCitations: Citation[], + current?: CurrentSearch + ) => { + if (!current) return; - const entryCitations = Array.from(current.citationsMap.values()) + const entryCitations = Array.from(current.citationsMap.values()); if (!entryCitations.some((c) => !c.index)) { // All of the entries citations have indexes, so there weren't any changes since the last check - return + return; } // Get a mapping of all known citations, so as to reuse them if they appear again - const citationsMapping = Object.fromEntries(allCitations.map((c) => [c.title + c.url, c.index])) + const citationsMapping = Object.fromEntries( + allCitations.map((c) => [c.title + 
c.url, c.index]) + ); entryCitations.forEach((c) => { - const hash = c.title + c.url - const index = citationsMapping[hash] + const hash = c.title + c.url; + const index = citationsMapping[hash]; if (!index) { - c.index = allCitations.length + 1 - allCitations.push(c) + c.index = allCitations.length + 1; + allCitations.push(c); } else { - c.index = index + c.index = index; } - }) - setCitations(allCitations) - setCurrent(current) - } + }); + setCitations(allCitations); + setCurrent(current); + }; const addEntry = (entry: Entry) => { setEntries((prev) => { - const entries = [...prev, entry] + const entries = [...prev, entry]; if (onNewEntry) { - onNewEntry(entries) + onNewEntry(entries); } - return entries - }) - } + return entries; + }); + }; const search = async ( query: string, - query_source: 'search' | 'followups', + query_source: "search" | "followups", enable: (f_set: Followup[] | ((fs: Followup[]) => Followup[])) => void, controller: AbortController ) => { // clear the query box, append to entries const userEntry: Entry = { - role: 'user', - content: query_source === 'search' ? query : query.split('\n', 2)[1]!, - } + role: "user", + content: query_source === "search" ? query : query.split("\n", 2)[1]!, + }; - const {result, followups} = await runSearch( + const { result, followups } = await runSearch( query, query_source, settings, @@ -111,44 +120,46 @@ const Chat = ({sessionId, settings, onQuery, onNewEntry}: ChatParams) => { updateCurrent, sessionId, controller - ) - if (result.content !== 'aborted') { - addEntry(userEntry) - addEntry(result) - enable(followups || []) - scroll30() + ); + if (result.content !== "aborted") { + addEntry(userEntry); + addEntry(result); + enable(followups || []); + scroll30(); } else { - enable([]) + enable([]); } - setCurrent(undefined) - } + setCurrent(undefined); + }; - var last_entry = <> + var last_entry = <>; switch (current?.phase) { - case 'semantic': - last_entry =

Loading: Performing semantic search...

- break - case 'prompt': - last_entry =

Loading: Creating prompt...

- break - case 'llm': - last_entry =

Loading: Waiting for LLM...

- break - case 'streaming': - updateCitations(citations, current) - last_entry = - break - case 'followups': + case "semantic": + last_entry =

Loading: Performing semantic search...

; + break; + case "prompt": + last_entry =

Loading: Creating prompt...

; + break; + case "llm": + last_entry =

Loading: Waiting for LLM...

; + break; + case "streaming": + updateCitations(citations, current); + last_entry = ; + break; + case "followups": last_entry = ( <>

Checking for followups...

- ) - break + ); + break; default: - last_entry = - break + last_entry = ( + + ); + break; } return ( @@ -160,7 +171,7 @@ const Chat = ({sessionId, settings, onQuery, onNewEntry}: ChatParams) => { {last_entry} - ) -} + ); +}; -export default Chat +export default Chat; diff --git a/web/src/components/citations.tsx b/web/src/components/citations.tsx index a1829c5..8444e5b 100644 --- a/web/src/components/citations.tsx +++ b/web/src/components/citations.tsx @@ -1,5 +1,5 @@ -import type {Citation} from '../types' -import {Colours, A} from './html' +import type { Citation } from "../types"; +import { Colours, A } from "./html"; export const formatCitations: (text: string) => string = (text) => { // ---------------------- normalize citation form ---------------------- @@ -13,10 +13,10 @@ export const formatCitations: (text: string) => string = (text) => { (block: string) => block - .split(',') + .split(",") .map((x) => x.trim()) - .join('][') - ) + .join("][") + ); // transform all things that look like [(a), (b), (c)] into [(a)][(b)][(c)] response = response.replace( @@ -24,107 +24,118 @@ export const formatCitations: (text: string) => string = (text) => { (block: string) => block - .split(',') + .split(",") .map((x) => x.trim()) - .join('][') - ) + .join("][") + ); // transform all things that look like [(a)] into [a] - response = response.replace(/\[\(([a-z]+)\)\]/g, (_match: string, x: string) => `[${x}]`) + response = response.replace( + /\[\(([a-z]+)\)\]/g, + (_match: string, x: string) => `[${x}]` + ); // transform all things that look like [ a ] into [a] - response = response.replace(/\[\s*([a-z]+)\s*\]/g, (_match: string, x: string) => `[${x}]`) - return response -} - -export const findCitations: (text: string, citations: Citation[]) => Map = ( - text, - citations -) => { + response = response.replace( + /\[\s*([a-z]+)\s*\]/g, + (_match: string, x: string) => `[${x}]` + ); + return response; +}; + +export const findCitations: ( + text: string, + citations: Citation[] +) => Map = (text, citations) => { // figure out what citations are in the response, and map them appropriately - const cite_map = new Map() + const cite_map = new Map(); // scan a regex for [x] over the response. If x isn't in the map, add it. // (note: we're actually doing this twice - once on parsing, once on render. // if that looks like a problem, we could swap from strings to custom ropes). 
- const regex = /\[([a-z]+)\]/g - let match + const regex = /\[([a-z]+)\]/g; + let match; while ((match = regex.exec(text)) !== null) { - const letter = match[1] - if (!letter || cite_map.has(letter!)) continue + const letter = match[1]; + if (!letter || cite_map.has(letter!)) continue; - const citation = citations[letter.charCodeAt(0) - 'a'.charCodeAt(0)] - if (!citation) continue + const citation = citations[letter.charCodeAt(0) - "a".charCodeAt(0)]; + if (!citation) continue; - cite_map.set(letter!, citation) + cite_map.set(letter!, citation); } - return cite_map -} + return cite_map; +}; -export const ShowCitation: React.FC<{citation: Citation}> = ({citation}) => { - var c_str = citation.title +export const ShowCitation: React.FC<{ citation: Citation }> = ({ + citation, +}) => { + var c_str = citation.title; - if (citation.authors && citation.authors.length > 0) c_str += ' - ' + citation.authors.join(', ') - if (citation.date && citation.date !== '') c_str += ' - ' + citation.date + if (citation.authors && citation.authors.length > 0) + c_str += " - " + citation.authors.join(", "); + if (citation.date && citation.date !== "") c_str += " - " + citation.date; // if we don't have a url, link to a duckduckgo search for the title instead const url = - citation.url && citation.url !== '' + citation.url && citation.url !== "" ? citation.url - : `https://duckduckgo.com/?q=${encodeURIComponent(citation.title)}` + : `https://duckduckgo.com/?q=${encodeURIComponent(citation.title)}`; return ( [{citation.index}]

{c_str}

- ) -} + ); +}; -export const CitationRef: React.FC<{citation?: Citation}> = ({citation}) => { - if (!citation) return null +export const CitationRef: React.FC<{ citation?: Citation }> = ({ + citation, +}) => { + if (!citation) return null; const url = - citation.url && citation.url !== '' + citation.url && citation.url !== "" ? citation.url - : `https://duckduckgo.com/?q=${encodeURIComponent(citation.title)}` + : `https://duckduckgo.com/?q=${encodeURIComponent(citation.title)}`; return ( [{citation.index}] - ) -} + ); +}; export const CitationsBlock: React.FC<{ - text: string - citations: Map - textRenderer: (t: string) => any -}> = ({text, citations, textRenderer}) => { - const regex = /\[([a-z]+)\]/g + text: string; + citations: Map; + textRenderer: (t: string) => any; +}> = ({ text, citations, textRenderer }) => { + const regex = /\[([a-z]+)\]/g; return (

- {' '} + {" "} {text.split(regex).map((part, i) => { // When splitting, the even parts are basic text sections, while the odd ones are // citations if (i % 2 == 0) { - return textRenderer(part) + return textRenderer(part); } else { - return + return ; } })}

- ) -} + ); +}; diff --git a/web/src/components/controls.tsx b/web/src/components/controls.tsx index 260c6dd..a92ae63 100644 --- a/web/src/components/controls.tsx +++ b/web/src/components/controls.tsx @@ -1,6 +1,12 @@ -import type {Mode} from '../types' +import type { Mode } from "../types"; -export const Controls = ({mode, setMode}: {mode: [Mode, boolean]; setMode: (m: any) => void}) => { +export const Controls = ({ + mode, + setMode, +}: { + mode: [Mode, boolean]; + setMode: (m: any) => void; +}) => { { /* three buttons for the three modes, place far right, 1rem between each */ } @@ -8,10 +14,11 @@ export const Controls = ({mode, setMode}: {mode: [Mode, boolean]; setMode: (m: a
-  )
-}
+  );
+};
diff --git a/web/src/components/entry.tsx b/web/src/components/entry.tsx
index c6d6341..2e052b6 100644
--- a/web/src/components/entry.tsx
+++ b/web/src/components/entry.tsx
@@ -4,14 +4,14 @@ import type {
   ErrorMessage,
   StampyMessage,
   UserEntry,
-} from '../types'
-import {AssistantEntry} from './assistant'
-import {GlossarySpan} from './glossary'
-import Image from 'next/image'
-import logo from '../logo.svg'
-import TextareaAutosize from 'react-textarea-autosize'
+} from "../types";
+import { AssistantEntry } from "./assistant";
+import { GlossarySpan } from "./glossary";
+import Image from "next/image";
+import logo from "../logo.svg";
+import TextareaAutosize from "react-textarea-autosize";

-export const User = ({entry}: {entry: UserEntry}) => {
+export const User = ({ entry }: { entry: UserEntry }) => {
   return (
  • { value={entry.content} />
  • - ) -} + ); +}; -export const Error = ({entry}: {entry: ErrorMessage}) => { +export const Error = ({ entry }: { entry: ErrorMessage }) => { return (
  • -

    {entry.content}

    +

    + {" "} + {entry.content}{" "} +

  • - ) -} + ); +}; -export const Assistant = ({entry}: {entry: AssistantEntryType}) => { +export const Assistant = ({ entry }: { entry: AssistantEntryType }) => { return (
  • - ) -} + ); +}; -export const Stampy = ({entry}: {entry: StampyMessage}) => { +export const Stampy = ({ entry }: { entry: StampyMessage }) => { return (
  • - ) -} + ); +}; -export const Entry = ({entry}: {entry: EntryType}) => { +export const Entry = ({ entry }: { entry: EntryType }) => { switch (entry.role) { - case 'user': - return - case 'error': - return - case 'assistant': - return - case 'stampy': - return + case "user": + return ; + case "error": + return ; + case "assistant": + return ; + case "stampy": + return ; } -} +}; diff --git a/web/src/components/glossary.tsx b/web/src/components/glossary.tsx index 0733ef7..8d4994f 100644 --- a/web/src/components/glossary.tsx +++ b/web/src/components/glossary.tsx @@ -1,30 +1,32 @@ -import {createContext, useContext} from 'react' +import { createContext, useContext } from "react"; type GlossaryItem = { - term: string - pageid: string - contents: string -} + term: string; + pageid: string; + contents: string; +}; -export type Glossary = Map +export type Glossary = Map; -export const GlossaryContext = createContext<{g: Glossary; r: RegExp} | null>(null) +export const GlossaryContext = createContext<{ g: Glossary; r: RegExp } | null>( + null +); // A component which wraps arbitrary html in a span, and injects glossary terms // into it as hoverable pop-up links. The text is immediately rendered normally, // but after the glossary is loaded (which happens once per page, asynchronously), // the glossary terms are replaced with elements. -export const GlossarySpan: React.FC<{content: string}> = ({content}) => { - const g = useContext(GlossaryContext) +export const GlossarySpan: React.FC<{ content: string }> = ({ content }) => { + const g = useContext(GlossaryContext); // If the glossary hasn't loaded yet, just render the text normally. if (g == null) { - return + return ; } - const glossary = g.g - const glossaryRegex = g.r + const glossary = g.g; + const glossaryRegex = g.r; // Otherwise, replace glossary terms with links. We can do this in // O(n * sum of term lengths) by finding String.prototype.indexOf of @@ -36,17 +38,17 @@ export const GlossarySpan: React.FC<{content: string}> = ({content}) => { { - const item = glossary.get(match.toLowerCase()) - if (item == undefined) return match + const item = glossary.get(match.toLowerCase()); + if (item == undefined) return match; - const hover_content = item.contents - const pageid = item.pageid + const hover_content = item.contents; + const pageid = item.pageid; - if (pageid == undefined || pageid.trim() == '') { + if (pageid == undefined || pageid.trim() == "") { return `
    ${hover_content}
    ${match} - ` + `; } else { return `
    ${hover_content}
    @@ -55,10 +57,10 @@ export const GlossarySpan: React.FC<{content: string}> = ({content}) => { class="glossary-link"> ${match} - ` + `; } }), }} /> - ) -} + ); +}; diff --git a/web/src/components/header.tsx b/web/src/components/header.tsx index 6a967c8..1941361 100644 --- a/web/src/components/header.tsx +++ b/web/src/components/header.tsx @@ -1,11 +1,13 @@ -import React from 'react' -import Link from 'next/link' -import Image from 'next/image' -import logo from '../logo.svg' +import React from "react"; +import Link from "next/link"; +import Image from "next/image"; +import logo from "../logo.svg"; -const Header: React.FC<{page: 'index' | 'semantic' | 'playground'}> = ({page}) => { +const Header: React.FC<{ page: "index" | "semantic" | "playground" }> = ({ + page, +}) => { const sidebar = - page === 'index' ? ( + page === "index" ? ( Show Sources @@ -13,7 +15,7 @@ const Header: React.FC<{page: 'index' | 'semantic' | 'playground'}> = ({page}) = Go Chat - ) + ); return (
    @@ -21,7 +23,7 @@ const Header: React.FC<{page: 'index' | 'semantic' | 'playground'}> = ({page}) =

    AI Safety Chatbot

    {sidebar}
    - ) -} + ); +}; -export default Header +export default Header; diff --git a/web/src/components/html.tsx b/web/src/components/html.tsx index e23c6dc..8312bb7 100644 --- a/web/src/components/html.tsx +++ b/web/src/components/html.tsx @@ -5,28 +5,28 @@ // the source file for it to be included in the build export const Colours = [ - 'bg-red-100 border-red-300 text-red-800', - 'bg-amber-100 border-amber-300 text-amber-800', - 'bg-orange-100 border-orange-300 text-orange-800', - 'bg-lime-100 border-lime-300 text-lime-800', - 'bg-green-100 border-green-300 text-green-800', - 'bg-cyan-100 border-cyan-300 text-cyan-800', - 'bg-blue-100 border-blue-300 text-blue-800', - 'bg-violet-100 border-violet-300 text-violet-800', - 'bg-pink-100 border-pink-300 text-pink-800', -] + "bg-red-100 border-red-300 text-red-800", + "bg-amber-100 border-amber-300 text-amber-800", + "bg-orange-100 border-orange-300 text-orange-800", + "bg-lime-100 border-lime-300 text-lime-800", + "bg-green-100 border-green-300 text-green-800", + "bg-cyan-100 border-cyan-300 text-cyan-800", + "bg-blue-100 border-blue-300 text-blue-800", + "bg-violet-100 border-violet-300 text-violet-800", + "bg-pink-100 border-pink-300 text-pink-800", +]; export const A: React.FC<{ - href: string - className?: string - children: React.ReactNode -}> = ({href, className, children}) => { + href: string; + className?: string; + children: React.ReactNode; +}> = ({ href, className, children }) => { // link element that only populates the href field if the contents are there - return href && href !== '' ? ( + return href && href !== "" ? ( {children} ) : ( {children} - ) -} + ); +}; diff --git a/web/src/components/page.tsx b/web/src/components/page.tsx index 23f899e..f64337b 100644 --- a/web/src/components/page.tsx +++ b/web/src/components/page.tsx @@ -1,8 +1,11 @@ -import React, {ReactNode} from 'react' -import Head from 'next/head' -import Header from './header' +import React, { ReactNode } from "react"; +import Head from "next/head"; +import Header from "./header"; -const Page: React.FC<{children: ReactNode; page: 'index' | 'semantic'}> = ({page, children}) => { +const Page: React.FC<{ children: ReactNode; page: "index" | "semantic" }> = ({ + page, + children, +}) => { return ( <> @@ -13,6 +16,6 @@ const Page: React.FC<{children: ReactNode; page: 'index' | 'semantic'}> = ({page {children} - ) -} -export default Page + ); +}; +export default Page; diff --git a/web/src/components/searchbox.tsx b/web/src/components/searchbox.tsx index 07b827c..f54470f 100644 --- a/web/src/components/searchbox.tsx +++ b/web/src/components/searchbox.tsx @@ -1,100 +1,106 @@ -import React from 'react' -import {useState, useEffect} from 'react' -import type {Followup} from '../types' -import TextareaAutosize from 'react-textarea-autosize' -import dynamic from 'next/dynamic' +import React from "react"; +import { useState, useEffect } from "react"; +import type { Followup } from "../types"; +import TextareaAutosize from "react-textarea-autosize"; +import dynamic from "next/dynamic"; // initial questions to fill the search box with. 
export const initialQuestions: string[] = [ - 'Are there any regulatory efforts aimed at addressing AI safety and alignment concerns?', - 'How can I help with AI safety and alignment?', - 'How could a predictive model - like an LLM - act like an agent?', + "Are there any regulatory efforts aimed at addressing AI safety and alignment concerns?", + "How can I help with AI safety and alignment?", + "How could a predictive model - like an LLM - act like an agent?", "How could an AI possibly be an x-risk when some populations aren't even connected to the internet?", "I'm not convinced, why is this important?", - 'Summarize the differences in opinion between Eliezer Yudkowsky and Paul Christiano.', + "Summarize the differences in opinion between Eliezer Yudkowsky and Paul Christiano.", 'What are "RAAPs"?', 'What are "scaling laws" and how are they relevant to safety?', - 'What are some of the different research approaches?', - 'What are the differences between Inner and Outer alignment?', + "What are some of the different research approaches?", + "What are the differences between Inner and Outer alignment?", 'What does the term "x-risk" mean?', 'What is "FOOM"?', 'What is "instrumental convergence"?', - 'What is a hard takeoff?', - 'What is a mesa-optimizer?', - 'What is AI safety and alignment?', - 'What is an AI arms race?', - 'What is an Intelligence Explosion?', + "What is a hard takeoff?", + "What is a mesa-optimizer?", + "What is AI safety and alignment?", + "What is an AI arms race?", + "What is an Intelligence Explosion?", 'What is the "orthogonality thesis"?', 'Why would we expect AI to be "misaligned by default"?', -] +]; const SearchBoxInternal: React.FC<{ search: ( query: string, - query_source: 'search' | 'followups', + query_source: "search" | "followups", enable: (f_set: Followup[] | ((fs: Followup[]) => Followup[])) => void, controller: AbortController - ) => void - onQuery?: (q: string) => any -}> = ({search, onQuery}) => { - const initial_query = initialQuestions[Math.floor(Math.random() * initialQuestions.length)] || '' + ) => void; + onQuery?: (q: string) => any; +}> = ({ search, onQuery }) => { + const initial_query = + initialQuestions[Math.floor(Math.random() * initialQuestions.length)] || ""; - const [query, setQuery] = useState(initial_query) - const [loading, setLoading] = useState(false) - const [followups, setFollowups] = useState([]) - const [controller, setController] = useState(new AbortController()) + const [query, setQuery] = useState(initial_query); + const [loading, setLoading] = useState(false); + const [followups, setFollowups] = useState([]); + const [controller, setController] = useState(new AbortController()); - const inputRef = React.useRef(null) + const inputRef = React.useRef(null); // because everything is async, I can't just manually set state at the // point we do a search. Instead it needs to be passed into the search // method, for some reason. 
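// The likely reason (not stated in the diff): by the time a search resolves, the
// component may already be on a newer search with a fresh AbortController, so the
// callback has to close over the controller for its own search instead of reading
// component state that could have gone stale. Sketch of the intended wiring, using
// the same names that appear later in this file:
//
//   const controller = new AbortController();
//   setController(controller);
//   search(query, "search", enable(controller), controller);
//   // enable(controller) only clears the query box if that specific controller
//   // was never aborted; it always re-enables the box and sets the followups.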
const enable = - (controller: AbortController) => (f_set: Followup[] | ((fs: Followup[]) => Followup[])) => { - if (!controller.signal.aborted) setQuery('') + (controller: AbortController) => + (f_set: Followup[] | ((fs: Followup[]) => Followup[])) => { + if (!controller.signal.aborted) setQuery(""); - setLoading(false) - setFollowups(f_set) - } + setLoading(false); + setFollowups(f_set); + }; useEffect(() => { // set focus on the input box - if (!loading) inputRef.current?.focus() - }, [loading]) + if (!loading) inputRef.current?.focus(); + }, [loading]); // on first mount focus and set cursor to end of input useEffect(() => { - if (!inputRef.current) return - inputRef.current.focus() - inputRef.current.selectionStart = inputRef.current.textLength - inputRef.current.selectionEnd = inputRef.current.textLength - }, []) + if (!inputRef.current) return; + inputRef.current.focus(); + inputRef.current.selectionStart = inputRef.current.textLength; + inputRef.current.selectionEnd = inputRef.current.textLength; + }, []); - const runSearch = (query: string, searchtype: 'search' | 'followups') => () => { - if (loading || query.trim() === '') return + const runSearch = + (query: string, searchtype: "search" | "followups") => () => { + if (loading || query.trim() === "") return; - setLoading(true) - const controller = new AbortController() - setController(controller) - search(query, 'search', enable(controller), controller) - } - const cancelSearch = () => controller.abort() + setLoading(true); + const controller = new AbortController(); + setController(controller); + search(query, "search", enable(controller), controller); + }; + const cancelSearch = () => controller.abort(); return ( <>
    - {' '} + {" "} {followups.map((followup, i) => { return (
  • - ) + ); })}
    @@ -104,31 +110,31 @@ const SearchBoxInternal: React.FC<{ ref={inputRef} value={query} onChange={(e) => { - setQuery(e.target.value) - onQuery && onQuery(e.target.value) + setQuery(e.target.value); + onQuery && onQuery(e.target.value); }} onKeyDown={(e) => { // if , blur the input box - if (e.key === 'Escape') e.currentTarget.blur() + if (e.key === "Escape") e.currentTarget.blur(); // if without , submit the form (if it's not empty) - if (e.key === 'Enter' && !e.shiftKey) { - e.preventDefault() - runSearch(query, 'search') + if (e.key === "Enter" && !e.shiftKey) { + e.preventDefault(); + runSearch(query, "search"); } }} /> - ) -} + ); +}; export const SearchBox = dynamic(() => Promise.resolve(SearchBoxInternal), { ssr: false, -}) +}); diff --git a/web/src/hooks/useSearch.ts b/web/src/hooks/useSearch.ts index 697a328..cfc7777 100644 --- a/web/src/hooks/useSearch.ts +++ b/web/src/hooks/useSearch.ts @@ -1,4 +1,4 @@ -import {API_URL, STAMPY_URL, STAMPY_CONTENT_URL} from '../settings' +import { API_URL, STAMPY_URL, STAMPY_CONTENT_URL } from "../settings"; import type { Citation, Entry, @@ -9,48 +9,48 @@ import type { CurrentSearch, SearchResult, LLMSettings, -} from '../types' -import {formatCitations, findCitations} from '../components/citations' +} from "../types"; +import { formatCitations, findCitations } from "../components/citations"; -const MAX_FOLLOWUPS = 4 -const DATA_HEADER = 'data: ' -const EVENT_END_HEADER = 'event: close' +const MAX_FOLLOWUPS = 4; +const DATA_HEADER = "data: "; +const EVENT_END_HEADER = "event: close"; type HistoryEntry = { - role: 'error' | 'stampy' | 'assistant' | 'user' - content: string -} + role: "error" | "stampy" | "assistant" | "user"; + content: string; +}; const ignoreAbort = (error: Error) => { - if (error.name !== 'AbortError') { - throw error + if (error.name !== "AbortError") { + throw error; } -} +}; export async function* iterateData(res: Response) { - const reader = res.body!.getReader() - var message = '' + const reader = res.body!.getReader(); + var message = ""; while (true) { - const {done, value} = await reader.read() + const { done, value } = await reader.read(); - if (done) return + if (done) return; - const chunk = new TextDecoder('utf-8').decode(value) - for (const line of chunk.split('\n')) { + const chunk = new TextDecoder("utf-8").decode(value); + for (const line of chunk.split("\n")) { // Most times, it seems that a single read() call will be one SSE "message", // but I'll do the proper aggregation spec thing in case that's not always true. 
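// Hypothetical example of what the loop below does with one decoded chunk
// (the payloads here are made up; the real server sends the state objects
// handled in extractAnswer):
//
//   chunk = 'data: {"state": "loading",\n"phase": "semantic"}\n\n'
//   'data: {"state": "loading",'  -> strip DATA_HEADER, append the rest to message
//   '"phase": "semantic"}'        -> no header but non-empty, append as-is (the #43 case)
//   ""                            -> message is complete: yield JSON.parse(message), reset it
//   "event: close"                -> stop iterating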
if (line.startsWith(EVENT_END_HEADER)) { - return + return; } else if (line.startsWith(DATA_HEADER)) { - message += line.slice(DATA_HEADER.length) + message += line.slice(DATA_HEADER.length); // Fixes #43 - } else if (line !== '') { - message += line - } else if (message !== '') { - yield JSON.parse(message) - message = '' + } else if (line !== "") { + message += line; + } else if (message !== "") { + yield JSON.parse(message); + message = ""; } } } @@ -61,50 +61,50 @@ export const extractAnswer = async ( setCurrent: (e: CurrentSearch) => void ): Promise => { var result: AssistantEntry = { - role: 'assistant', - content: '', + role: "assistant", + content: "", citations: [], citationsMap: new Map(), - } - var followups: Followup[] = [] + }; + var followups: Followup[] = []; for await (var data of iterateData(res)) { switch (data.state) { - case 'loading': - setCurrent({phase: data.phase, ...result}) - break + case "loading": + setCurrent({ phase: data.phase, ...result }); + break; - case 'citations': + case "citations": result = { ...result, citations: data?.citations || result?.citations || [], - } - setCurrent({phase: data.phase, ...result}) - break + }; + setCurrent({ phase: data.phase, ...result }); + break; - case 'streaming': + case "streaming": // incrementally build up the response - const content = formatCitations((result?.content || '') + data.content) + const content = formatCitations((result?.content || "") + data.content); result = { content, - role: 'assistant', + role: "assistant", citations: result?.citations || [], citationsMap: findCitations(content, result?.citations || []), - } - setCurrent({phase: 'streaming', ...result}) - break + }; + setCurrent({ phase: "streaming", ...result }); + break; - case 'followups': + case "followups": // add any potential followup questions - followups = data.followups.map((value: any) => value as Followup) - break - case 'done': - break - case 'error': - throw data.error + followups = data.followups.map((value: any) => value as Followup); + break; + case "done": + break; + case "error": + throw data.error; } } - return {result, followups} -} + return { result, followups }; +}; const fetchLLM = async ( sessionId: string, @@ -113,18 +113,18 @@ const fetchLLM = async ( history: HistoryEntry[], controller: AbortController ): Promise => - fetch(API_URL + '/chat', { + fetch(API_URL + "/chat", { signal: controller.signal, - method: 'POST', - cache: 'no-cache', + method: "POST", + cache: "no-cache", keepalive: true, headers: { - 'Content-Type': 'application/json', - Accept: 'text/event-stream', + "Content-Type": "application/json", + Accept: "text/event-stream", }, - body: JSON.stringify({sessionId, query, history, settings}), - }).catch(ignoreAbort) + body: JSON.stringify({ sessionId, query, history, settings }), + }).catch(ignoreAbort); export const queryLLM = async ( query: string, @@ -135,58 +135,59 @@ export const queryLLM = async ( controller: AbortController ): Promise => { // do SSE on a POST request. 
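// Concretely (shape taken from fetchLLM above): POST {API_URL}/chat with
// headers { "Content-Type": "application/json", Accept: "text/event-stream" }
// and body JSON.stringify({ sessionId, query, history, settings }); the streamed
// response body is then consumed by iterateData/extractAnswer.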
- const res = await fetchLLM(sessionId, query, settings, history, controller) + const res = await fetchLLM(sessionId, query, settings, history, controller); if (!res) { - return {result: {role: 'error', content: 'No response from server'}} + return { result: { role: "error", content: "No response from server" } }; } else if (!res.ok) { - return {result: {role: 'error', content: 'POST Error: ' + res.status}} + return { result: { role: "error", content: "POST Error: " + res.status } }; } try { - return await extractAnswer(res, setCurrent) + return await extractAnswer(res, setCurrent); } catch (e) { - if ((e as Error)?.name === 'AbortError') { - return {result: {role: 'error', content: 'aborted'}} + if ((e as Error)?.name === "AbortError") { + return { result: { role: "error", content: "aborted" } }; } return { - result: {role: 'error', content: e ? e.toString() : 'unknown error'}, - } + result: { role: "error", content: e ? e.toString() : "unknown error" }, + }; } -} +}; const cleanStampyContent = (contents: string) => contents.replace( //g, - (_, pre, linkParts, post) => `` - ) + (_, pre, linkParts, post) => + `` + ); export const getStampyContent = async ( questionId: string, controller: AbortController ): Promise => { const res = await fetch(`${STAMPY_CONTENT_URL}/${questionId}`, { - method: 'GET', + method: "GET", signal: controller.signal, headers: { - 'Content-Type': 'application/json', - Accept: 'application/json', + "Content-Type": "application/json", + Accept: "application/json", }, - }).catch(ignoreAbort) + }).catch(ignoreAbort); if (!res) { - return {result: {role: 'error', content: 'No response from server'}} + return { result: { role: "error", content: "No response from server" } }; } else if (!res.ok) { - return {result: {role: 'error', content: 'POST Error: ' + res.status}} + return { result: { role: "error", content: "POST Error: " + res.status } }; } - const data = (await res.json()).data + const data = (await res.json()).data; let result = { - role: 'stampy', + role: "stampy", content: cleanStampyContent(data.text), url: `${STAMPY_URL}/?state=${data.pageid}`, - } as StampyMessage + } as StampyMessage; // re-enable the searchbox, with the question that was just answered // removed from the list of possible followups. 
@@ -196,45 +197,54 @@ export const getStampyContent = async ( pageid: f.pageid!, text: f.title!, score: 0, - })) + })); - const fpids = new Set(f_new.map((f: Followup) => f.pageid)) + const fpids = new Set(f_new.map((f: Followup) => f.pageid)); const followups = (f_old: Followup[]): Followup[] => { - const f_old_filtered = f_old.filter((f) => f.pageid !== data.pageid && !fpids.has(f.pageid)) - return [...f_new, ...f_old_filtered].slice(0, MAX_FOLLOWUPS) - } + const f_old_filtered = f_old.filter( + (f) => f.pageid !== data.pageid && !fpids.has(f.pageid) + ); + return [...f_new, ...f_old_filtered].slice(0, MAX_FOLLOWUPS); + }; - return {followups, result} -} + return { followups, result }; +}; export const runSearch = async ( query: string, - query_source: 'search' | 'followups', + query_source: "search" | "followups", settings: LLMSettings, entries: Entry[], setCurrent: (c: CurrentSearch) => void, sessionId: string, controller: AbortController ): Promise => { - if (query_source === 'search') { + if (query_source === "search") { const history = entries - .filter((entry) => entry.role !== 'error') + .filter((entry) => entry.role !== "error") .map((entry) => ({ role: entry.role, content: entry.content.trim(), - })) - - return await queryLLM(query, settings, history, setCurrent, sessionId, controller) + })); + + return await queryLLM( + query, + settings, + history, + setCurrent, + sessionId, + controller + ); } else { // ----------------- HUMAN AUTHORED CONTENT RETRIEVAL ------------------ - const [questionId] = query.split('\n', 2) + const [questionId] = query.split("\n", 2); if (questionId) { - return await getStampyContent(questionId, controller) + return await getStampyContent(questionId, controller); } const result = { - role: 'error', - content: 'Could not extract Stampy id from ' + query, - } - return {result} as SearchResult + role: "error", + content: "Could not extract Stampy id from " + query, + }; + return { result } as SearchResult; } -} +}; diff --git a/web/src/pages/_app.tsx b/web/src/pages/_app.tsx index f35883f..c03e5ad 100644 --- a/web/src/pages/_app.tsx +++ b/web/src/pages/_app.tsx @@ -1,127 +1,129 @@ -import {type AppType} from 'next/dist/shared/lib/utils' -import {useEffect, useState} from 'react' +import { type AppType } from "next/dist/shared/lib/utils"; +import { useEffect, useState } from "react"; -import '~/styles/globals.css' +import "~/styles/globals.css"; -import {Glossary, GlossaryContext} from '../components/glossary' +import { Glossary, GlossaryContext } from "../components/glossary"; -const MyApp: AppType = ({Component, pageProps}) => { - const [glossary, setGlossary] = useState<{g: Glossary; r: RegExp} | null>(null) +const MyApp: AppType = ({ Component, pageProps }) => { + const [glossary, setGlossary] = useState<{ g: Glossary; r: RegExp } | null>( + null + ); // fetch glossary and compile regex once on load useEffect(() => { if (glossary === null) - tempHackFetch('/questions/glossary') + tempHackFetch("/questions/glossary") .then((res) => res.json()) .then((data) => { - const glossary: Glossary = new Map(Object.entries(data)) + const glossary: Glossary = new Map(Object.entries(data)); const keys = Array.from(glossary.keys()) .sort((a, b) => b.length - a.length) // sort by length descending - .map((k) => k.replace(/[-\/\\^$*+?.()|[\]{}]/g, '\\$&')) // escape regex chars - .map((k) => `\\b${k}\\b`) // add word boundaries + .map((k) => k.replace(/[-\/\\^$*+?.()|[\]{}]/g, "\\$&")) // escape regex chars + .map((k) => `\\b${k}\\b`); // add word boundaries - const 
regex = new RegExp(keys.join('|'), 'gim') - setGlossary({g: glossary, r: regex}) - }) - }, []) + const regex = new RegExp(keys.join("|"), "gim"); + setGlossary({ g: glossary, r: regex }); + }); + }, []); return ( - ) -} + ); +}; -export default MyApp +export default MyApp; // ------------------- hack until server endpoint is working ------------------- const GLOSSARY_JSON = { - 'chain of thought prompting': { - term: 'chain of thought prompting', - pageid: '8EL7', + "chain of thought prompting": { + term: "chain of thought prompting", + pageid: "8EL7", contents: - '

    Chain-of-thought prompting is a technique which makes a language model generate intermediate reasoning steps in its output.

    \n', + "

    Chain-of-thought prompting is a technique which makes a language model generate intermediate reasoning steps in its output.

    \n", }, - 'chain-of-thought': { - term: 'chain-of-thought', - pageid: '8EL7', + "chain-of-thought": { + term: "chain-of-thought", + pageid: "8EL7", contents: - '

    Chain-of-thought prompting is a technique which makes a language model generate intermediate reasoning steps in its output.

    \n', + "

    Chain-of-thought prompting is a technique which makes a language model generate intermediate reasoning steps in its output.

    \n", }, "goodhart's law": { term: "goodhart's law", - pageid: '8185', + pageid: "8185", contents: - '

    Goodhart’s law states that when a measure becomes a target, it ceases to be a good measure.

    \n', + "

    Goodhart’s law states that when a measure becomes a target, it ceases to be a good measure.

    \n", }, - 'the big g,': { - term: 'the big g,', - pageid: '8185', + "the big g,": { + term: "the big g,", + pageid: "8185", contents: - '

    Goodhart’s law states that when a measure becomes a target, it ceases to be a good measure.

    \n', + "

    Goodhart’s law states that when a measure becomes a target, it ceases to be a good measure.

    \n", }, - 'terminal goals': { - term: 'terminal goals', - pageid: '', + "terminal goals": { + term: "terminal goals", + pageid: "", contents: - '

    Goals which are valued as ends in themselves, rather than as instrumental to something else.

    \n', + "

    Goals which are valued as ends in themselves, rather than as instrumental to something else.

    \n", }, - 'terminal goal': { - term: 'terminal goal', - pageid: '', + "terminal goal": { + term: "terminal goal", + pageid: "", contents: - '

    Goals which are valued as ends in themselves, rather than as instrumental to something else.

    \n', + "

    Goals which are valued as ends in themselves, rather than as instrumental to something else.

    \n", }, - 'orthogonality thesis': { - term: 'orthogonality thesis', - pageid: '6568', + "orthogonality thesis": { + term: "orthogonality thesis", + pageid: "6568", contents: - '

    The thesis that any level of intelligence is compatible with any terminal goals. This implies that intelligence alone is not enough to make a system moral.

    \n', + "

    The thesis that any level of intelligence is compatible with any terminal goals. This implies that intelligence alone is not enough to make a system moral.

    \n", }, - 'instrumental convergence': { - term: 'instrumental convergence', - pageid: '897I', + "instrumental convergence": { + term: "instrumental convergence", + pageid: "897I", contents: - '

    Instrumental convergence is the idea that different AI agents, each with distinct terminal goals, will end up adopting many of the same instrumental goals.

    \n', + "

    Instrumental convergence is the idea that different AI agents, each with distinct terminal goals, will end up adopting many of the same instrumental goals.

    \n", }, - 'instrumentally convergent goals': { - term: 'instrumentally convergent goals', - pageid: '897I', + "instrumentally convergent goals": { + term: "instrumentally convergent goals", + pageid: "897I", contents: - '

    Instrumental convergence is the idea that different AI agents, each with distinct terminal goals, will end up adopting many of the same instrumental goals.

    \n', + "

    Instrumental convergence is the idea that different AI agents, each with distinct terminal goals, will end up adopting many of the same instrumental goals.

    \n", }, llm: { - term: 'llm', - pageid: '', + term: "llm", + pageid: "", contents: - '

    A large language model is an AI model which has been trained on a large body of text, in order to produce texts in a human-like way.

    \n', + "

    A large language model is an AI model which has been trained on a large body of text, in order to produce texts in a human-like way.

    \n", }, - 'large language model': { - term: 'large language model', - pageid: '', + "large language model": { + term: "large language model", + pageid: "", contents: - '

    A large language model is an AI model which has been trained on a large body of text, in order to produce texts in a human-like way.

    \n', + "

    A large language model is an AI model which has been trained on a large body of text, in order to produce texts in a human-like way.

    \n", }, - 'goal misgeneralization': { - term: 'goal misgeneralization', - pageid: '', + "goal misgeneralization": { + term: "goal misgeneralization", + pageid: "", contents: - '

    pursuing a different goal during deployment from the one that was pursued during training due to distribution shift

    \n', + "

    pursuing a different goal during deployment from the one that was pursued during training due to distribution shift

    \n", }, interpretability: { - term: 'interpretability', - pageid: '8241', + term: "interpretability", + pageid: "8241", contents: - '

    Interpretability is an area of alignment research that aims to make machine learning systems easier for humans to understand.

    \n', + "

    Interpretability is an area of alignment research that aims to make machine learning systems easier for humans to understand.

    \n", }, - 'existential risk': { - term: 'existential risk', - pageid: '89LL', + "existential risk": { + term: "existential risk", + pageid: "89LL", contents: "

    risks that threaten the destruction of humanity's long-term potential, including human extinction

    \n", }, -} +}; const tempHackFetch = (_url: string) => { return new Promise((resolve, _reject) => { @@ -129,7 +131,7 @@ const tempHackFetch = (_url: string) => { resolve({ ok: true, json: () => Promise.resolve(GLOSSARY_JSON), - } as unknown as Response) - }, 1000) - }) -} + } as unknown as Response); + }, 1000); + }); +}; diff --git a/web/src/pages/index.tsx b/web/src/pages/index.tsx index c721e2a..41482d1 100644 --- a/web/src/pages/index.tsx +++ b/web/src/pages/index.tsx @@ -1,46 +1,46 @@ -import {type NextPage} from 'next' -import {useState, useEffect} from 'react' -import Link from 'next/link' +import { type NextPage } from "next"; +import { useState, useEffect } from "react"; +import Link from "next/link"; -import {queryLLM, getStampyContent, runSearch} from '../hooks/useSearch' -import type {Mode} from '../types' -import Page from '../components/page' -import Chat from '../components/chat' -import {Controls} from '../components/controls' +import { queryLLM, getStampyContent, runSearch } from "../hooks/useSearch"; +import type { Mode } from "../types"; +import Page from "../components/page"; +import Chat from "../components/chat"; +import { Controls } from "../components/controls"; -const MAX_FOLLOWUPS = 4 +const MAX_FOLLOWUPS = 4; const Home: NextPage = () => { - const [sessionId, setSessionId] = useState('') - const [mode, setMode] = useState<[Mode, boolean]>(['default', false]) + const [sessionId, setSessionId] = useState(""); + const [mode, setMode] = useState<[Mode, boolean]>(["default", false]); // store mode in localstorage useEffect(() => { - if (mode[1]) localStorage.setItem('chat_mode', mode[0]) - }, [mode]) + if (mode[1]) localStorage.setItem("chat_mode", mode[0]); + }, [mode]); // initial load useEffect(() => { - const mode = (localStorage.getItem('chat_mode') as Mode) || 'default' - setMode([mode, true]) - setSessionId(crypto.randomUUID()) - }, []) + const mode = (localStorage.getItem("chat_mode") as Mode) || "default"; + setMode([mode, true]); + setSessionId(crypto.randomUUID()); + }, []); return (

    - WARNING: This is a very early prototype.{' '} + WARNING: This is a very early prototype.{" "} Feedback - {' '} + {" "} welcomed.

    - +
    - ) -} + ); +}; -export default Home +export default Home; diff --git a/web/src/pages/playground.tsx b/web/src/pages/playground.tsx index 29a2d68..1c7dbe2 100644 --- a/web/src/pages/playground.tsx +++ b/web/src/pages/playground.tsx @@ -1,108 +1,117 @@ -import type {NextPage} from 'next' -import {useState, useEffect, ChangeEvent} from 'react' -import TextareaAutosize from 'react-textarea-autosize' -import Head from 'next/head' -import Link from 'next/link' +import type { NextPage } from "next"; +import { useState, useEffect, ChangeEvent } from "react"; +import TextareaAutosize from "react-textarea-autosize"; +import Head from "next/head"; +import Link from "next/link"; -import {queryLLM, getStampyContent, runSearch} from '../hooks/useSearch' -import type {Mode, Entry, LLMSettings} from '../types' -import Header from '../components/header' -import Chat from '../components/chat' -import {Controls} from '../components/controls' +import { queryLLM, getStampyContent, runSearch } from "../hooks/useSearch"; +import type { Mode, Entry, LLMSettings } from "../types"; +import Header from "../components/header"; +import Chat from "../components/chat"; +import { Controls } from "../components/controls"; -const MAX_FOLLOWUPS = 4 +const MAX_FOLLOWUPS = 4; const DEFAULT_PROMPTS = { source: { prefix: - 'You are a helpful assistant knowledgeable about AI Alignment and Safety. ' + + "You are a helpful assistant knowledgeable about AI Alignment and Safety. " + 'Please give a clear and coherent answer to the user\'s questions.(written after "Q:") ' + - 'using the following sources. Each source is labeled with a letter. Feel free to ' + - 'use the sources in any order, and try to use multiple sources in your answers.\n\n', + "using the following sources. Each source is labeled with a letter. Feel free to " + + "use the sources in any order, and try to use multiple sources in your answers.\n\n", suffix: - '\n\n' + + "\n\n" + 'Before the question ("Q: "), there will be a history of previous questions and answers. ' + - 'These sources only apply to the last question. Any sources used in previous answers ' + - 'are invalid.', + "These sources only apply to the last question. Any sources used in previous answers " + + "are invalid.", }, question: - 'In your answer, please cite any claims you make back to each source ' + - 'using the format: [a], [b], etc. If you use multiple sources to make a claim ' + + "In your answer, please cite any claims you make back to each source " + + "using the format: [a], [b], etc. If you use multiple sources to make a claim " + 'cite all of them. For example: "AGI is concerning [c, d, e]."\n\n', modes: { - default: '', + default: "", concise: - 'Answer very concisely, getting to the crux of the matter in as ' + - 'few words as possible. Limit your answer to 1-2 sentences.\n\n', + "Answer very concisely, getting to the crux of the matter in as " + + "few words as possible. Limit your answer to 1-2 sentences.\n\n", rookie: "This user is new to the field of AI Alignment and Safety - don't " + - 'assume they know any technical terms or jargon. Still give a complete answer ' + - 'without patronizing the user, but take any extra time needed to ' + - 'explain new concepts or to illustrate your answer with examples. ' + - 'Put extra effort into explaining the intuition behind concepts ' + - 'rather than just giving a formal definition.\n\n', + "assume they know any technical terms or jargon. 
Still give a complete answer " + + "without patronizing the user, but take any extra time needed to " + + "explain new concepts or to illustrate your answer with examples. " + + "Put extra effort into explaining the intuition behind concepts " + + "rather than just giving a formal definition.\n\n", }, -} +}; const MODELS = { - 'gpt-3.5-turbo': {numTokens: 4095, topKBlocks: 10}, - 'gpt-3.5-turbo-16k': {numTokens: 16385, topKBlocks: 30}, - 'gpt-4': {numTokens: 8192, topKBlocks: 20}, + "gpt-3.5-turbo": { numTokens: 4095, topKBlocks: 10 }, + "gpt-3.5-turbo-16k": { numTokens: 16385, topKBlocks: 30 }, + "gpt-4": { numTokens: 8192, topKBlocks: 20 }, /* 'gpt-4-32k': {numTokens: 32768, topKBlocks: 30}, */ -} +}; const DEFAULT_SETTINGS = { prompts: DEFAULT_PROMPTS, - mode: 'default' as Mode, - completions: 'gpt-3.5-turbo', - encoder: 'cl100k_base', - topKBlocks: MODELS['gpt-3.5-turbo'].topKBlocks, // the number of blocks to use as citations - numTokens: MODELS['gpt-3.5-turbo'].numTokens, + mode: "default" as Mode, + completions: "gpt-3.5-turbo", + encoder: "cl100k_base", + topKBlocks: MODELS["gpt-3.5-turbo"].topKBlocks, // the number of blocks to use as citations + numTokens: MODELS["gpt-3.5-turbo"].numTokens, tokensBuffer: 50, // the number of tokens to leave as a buffer when calculating remaining tokens maxHistory: 10, // the max number of previous items to use as history historyFraction: 0.25, // the (approximate) fraction of num_tokens to use for history text before truncating contextFraction: 0.5, // the (approximate) fraction of num_tokens to use for context text before truncating -} -const ENCODERS = ['cl100k_base'] +}; +const ENCODERS = ["cl100k_base"]; -const updateIn = (obj: {[key: string]: any}, [head, ...rest]: string[], val: any) => { +const updateIn = ( + obj: { [key: string]: any }, + [head, ...rest]: string[], + val: any +) => { if (!head) { // No path provided - do nothing } else if (!rest || rest.length == 0) { - obj[head] = val + obj[head] = val; } else { - updateIn(obj[head], rest, val) + updateIn(obj[head], rest, val); } - return obj -} + return obj; +}; -type Parseable = string | number | undefined -type NumberParser = (v: Parseable) => number +type Parseable = string | number | undefined; +type NumberParser = (v: Parseable) => number; type InputFields = { - field: string - label: string - value?: Parseable - min?: string | number - max?: string | number - step?: string | number - parser?: NumberParser - updater: (v: any) => any -} + field: string; + label: string; + value?: Parseable; + min?: string | number; + max?: string | number; + step?: string | number; + parser?: NumberParser; + updater: (v: any) => any; +}; const between = - (min: Parseable, max: Parseable, parser: NumberParser, updater: (v: any) => any) => + ( + min: Parseable, + max: Parseable, + parser: NumberParser, + updater: (v: any) => any + ) => (event: ChangeEvent) => { - let num = parser((event.target as HTMLInputElement).value) + let num = parser((event.target as HTMLInputElement).value); if (isNaN(num)) { - return + return; } else if (min !== undefined && num < parser(min)) { - num = parser(min) + num = parser(min); } else if (max !== undefined && num > parser(max)) { - num = parser(max) + num = parser(max); } - updater(num) - } + updater(num); + }; -const SectionHeader = ({text}: {text: string}) => ( +const SectionHeader = ({ text }: { text: string }) => (

    {text}

    -) +); const NumberInput = ({ field, @@ -116,7 +125,7 @@ const NumberInput = ({ }: InputFields) => ( <> -) +); const Slider = ({ field, @@ -154,25 +163,26 @@ const Slider = ({ step={step} /> -) +); type ChatSettingsParams = { - settings: LLMSettings - updateSettings: (updater: (settings: LLMSettings) => LLMSettings) => void -} + settings: LLMSettings; + updateSettings: (updater: (settings: LLMSettings) => LLMSettings) => void; +}; -const ChatSettings = ({settings, updateSettings}: ChatSettingsParams) => { +const ChatSettings = ({ settings, updateSettings }: ChatSettingsParams) => { const changeVal = (field: string, value: any) => - updateSettings((prev) => ({...prev, [field]: value})) + updateSettings((prev) => ({ ...prev, [field]: value })); const update = (setting: string) => (event: ChangeEvent) => { - changeVal(setting, (event.target as HTMLInputElement).value) - } - const updateNum = (field: string) => (num: Parseable) => changeVal(field, num) + changeVal(setting, (event.target as HTMLInputElement).value); + }; + const updateNum = (field: string) => (num: Parseable) => + changeVal(field, num); return (
    - ) -} + ); +}; type ChatPromptParams = { - settings: LLMSettings - query: string - history: Entry[] - updateSettings: (updater: (settings: LLMSettings) => LLMSettings) => void -} + settings: LLMSettings; + query: string; + history: Entry[]; + updateSettings: (updater: (settings: LLMSettings) => LLMSettings) => void; +}; -const ChatPrompts = ({settings, query, history, updateSettings}: ChatPromptParams) => { +const ChatPrompts = ({ + settings, + query, + history, + updateSettings, +}: ChatPromptParams) => { const updatePrompt = (...path: string[]) => (event: ChangeEvent) => { const newPrompts = { - ...updateIn(settings.prompts || {}, path, (event.target as HTMLInputElement).value), - } - updateSettings((settings) => ({...settings, prompts: newPrompts})) - } + ...updateIn( + settings.prompts || {}, + path, + (event.target as HTMLInputElement).value + ), + }; + updateSettings((settings) => ({ ...settings, prompts: newPrompts })); + }; return (
    @@ -296,14 +321,14 @@ const ChatPrompts = ({settings, query, history, updateSettings}: ChatPromptParam
    (This is where sources will be injected)
    {history.length > 0 && ( )} @@ -324,48 +349,48 @@ const ChatPrompts = ({settings, query, history, updateSettings}: ChatPromptParam
    Q: {query}
    - ) -} + ); +}; const Playground: NextPage = () => { - const [sessionId, setSessionId] = useState('') - const [settings, updateSettings] = useState(DEFAULT_SETTINGS) + const [sessionId, setSessionId] = useState(""); + const [settings, updateSettings] = useState(DEFAULT_SETTINGS); - const [query, setQuery] = useState('') - const [history, setHistory] = useState([]) + const [query, setQuery] = useState(""); + const [history, setHistory] = useState([]); const setMode = (mode: [Mode, boolean]) => { if (mode[1]) { - localStorage.setItem('chat_mode', mode[0]) - updateSettings((settings) => ({...settings, mode: mode[0]})) + localStorage.setItem("chat_mode", mode[0]); + updateSettings((settings) => ({ ...settings, mode: mode[0] })); } - } + }; // initial load useEffect(() => { - const mode = (localStorage.getItem('chat_mode') as Mode) || 'default' - setMode([mode, true]) - setSessionId(crypto.randomUUID()) - }, []) + const mode = (localStorage.getItem("chat_mode") as Mode) || "default"; + setMode([mode, true]); + setSessionId(crypto.randomUUID()); + }, []); return ( <> AI Safety Info -
    +
    - +
    {
    - ) -} + ); +}; -export default Playground +export default Playground; diff --git a/web/src/pages/semantic.tsx b/web/src/pages/semantic.tsx index 3ecea03..ac23f21 100644 --- a/web/src/pages/semantic.tsx +++ b/web/src/pages/semantic.tsx @@ -1,47 +1,47 @@ -import {type NextPage} from 'next' -import React, {useState} from 'react' -import {API_URL} from '../settings' -import type {Followup} from '../types' -import Page from '../components/page' -import {SearchBox} from '../components/searchbox' +import { type NextPage } from "next"; +import React, { useState } from "react"; +import { API_URL } from "../settings"; +import type { Followup } from "../types"; +import Page from "../components/page"; +import { SearchBox } from "../components/searchbox"; const ignoreAbort = (error: Error) => { - if (error.name !== 'AbortError') { - throw error + if (error.name !== "AbortError") { + throw error; } -} +}; const Semantic: NextPage = () => { - const [results, setResults] = useState([]) + const [results, setResults] = useState([]); const semantic_search = async ( query: string, - _query_source: 'search' | 'followups', + _query_source: "search" | "followups", enable: (f_set: Followup[]) => void, controller: AbortController ) => { - const res = await fetch(API_URL + '/semantic', { - method: 'POST', + const res = await fetch(API_URL + "/semantic", { + method: "POST", signal: controller.signal, headers: { - 'Content-Type': 'application/json', - 'Access-Control-Allow-Origin': '*', + "Content-Type": "application/json", + "Access-Control-Allow-Origin": "*", }, - body: JSON.stringify({query: query}), - }).catch(ignoreAbort) + body: JSON.stringify({ query: query }), + }).catch(ignoreAbort); if (!res) { - enable([]) - return + enable([]); + return; } else if (!res.ok) { - console.error('load failure: ' + res.status) + console.error("load failure: " + res.status); } - enable([]) + enable([]); - const data = await res.json() + const data = await res.json(); - setResults(data) - } + setResults(data); + }; return ( @@ -49,14 +49,14 @@ const Semantic: NextPage = () => {
      {results.map((entry, i) => ( -
    • +
    • ))}
    - ) -} + ); +}; // Round trip test. If this works, our heavier usecase probably will (famous last words) // The one real difference is we'll want to send back a series of results as we get @@ -64,39 +64,39 @@ const Semantic: NextPage = () => { // shouldn't be too much harder. type SemanticEntry = { - title: string - authors: string[] - date: string - url: string - tags: string - text: string -} + title: string; + authors: string[]; + date: string; + url: string; + tags: string; + text: string; +}; -const ShowSemanticEntry: React.FC<{entry: SemanticEntry}> = ({entry}) => { +const ShowSemanticEntry: React.FC<{ entry: SemanticEntry }> = ({ entry }) => { return (
    {/* horizontally split first row, title on left, authors on right */}

    {entry.title}

    - {entry.authors.join(', ')} - {entry.date} + {entry.authors.join(", ")} - {entry.date}

    - {entry.text.split('\n').map((paragraph, i) => { - const p = paragraph.trim() - if (p === '') return <> - if (p === '.....') return
    + {entry.text.split("\n").map((paragraph, i) => { + const p = paragraph.trim(); + if (p === "") return <>; + if (p === ".....") return
    ; return ( -

    - {' '} - {paragraph}{' '} +

    + {" "} + {paragraph}{" "}

    - ) + ); })} Read more
-  )
-}
+  );
+};

-export default Semantic
+export default Semantic;
diff --git a/web/src/settings.ts b/web/src/settings.ts
index 473c971..5dd7442 100644
--- a/web/src/settings.ts
+++ b/web/src/settings.ts
@@ -1,3 +1,5 @@
-export const API_URL = process.env.NEXT_PUBLIC_API_URL || 'http://127.0.0.1:3001'
-export const STAMPY_URL = process.env.STAMPY_URL || 'https://aisafety.info'
-export const STAMPY_CONTENT_URL = process.env.STAMPY_CONTENT_URL || `${API_URL}/human`
+export const API_URL =
+  process.env.NEXT_PUBLIC_API_URL || "http://127.0.0.1:3001";
+export const STAMPY_URL = process.env.STAMPY_URL || "https://aisafety.info";
+export const STAMPY_CONTENT_URL =
+  process.env.STAMPY_CONTENT_URL || `${API_URL}/human`;
diff --git a/web/src/types.ts b/web/src/types.ts
index 48f4b58..f1216db 100644
--- a/web/src/types.ts
+++ b/web/src/types.ts
@@ -1,62 +1,62 @@
 export type Citation = {
-  title: string
-  authors: string[]
-  date: string
-  url: string
-  index: number
-}
+  title: string;
+  authors: string[];
+  date: string;
+  url: string;
+  index: number;
+};

 export type Followup = {
-  text: string
-  pageid: string
-  score: number
-}
+  text: string;
+  pageid: string;
+  score: number;
+};

-export type Entry = UserEntry | AssistantEntry | ErrorMessage | StampyMessage
+export type Entry = UserEntry | AssistantEntry | ErrorMessage | StampyMessage;

 export type UserEntry = {
-  role: 'user'
-  content: string
-}
+  role: "user";
+  content: string;
+};

 export type AssistantEntry = {
-  role: 'assistant'
-  content: string
-  citations: Citation[]
-  citationsMap: Map<string, Citation>
-}
+  role: "assistant";
+  content: string;
+  citations: Citation[];
+  citationsMap: Map<string, Citation>;
+};

 export type ErrorMessage = {
-  role: 'error'
-  content: string
-}
+  role: "error";
+  content: string;
+};

 export type StampyMessage = {
-  role: 'stampy'
-  content: string
-  url: string
-}
+  role: "stampy";
+  content: string;
+  url: string;
+};

 export type SearchResult = {
-  followups?: Followup[] | ((f: Followup[]) => Followup[])
-  result: Entry
-}
-export type CurrentSearch = (AssistantEntry & {phase?: string}) | undefined
+  followups?: Followup[] | ((f: Followup[]) => Followup[]);
+  result: Entry;
+};
+export type CurrentSearch = (AssistantEntry & { phase?: string }) | undefined;

-export type Mode = 'rookie' | 'concise' | 'default'
+export type Mode = "rookie" | "concise" | "default";

 export type LLMSettings = {
   prompts?: {
-    [key: string]: any
-  }
-  mode?: Mode
-  completions?: string
-  encoder?: string
-  topKBlocks?: number
-  numTokens?: number
-  tokensBuffer?: number
-  maxHistory?: number
-  historyFraction?: number
-  contextFraction?: number
-  [key: string]: any
-}
+    [key: string]: any;
+  };
+  mode?: Mode;
+  completions?: string;
+  encoder?: string;
+  topKBlocks?: number;
+  numTokens?: number;
+  tokensBuffer?: number;
+  maxHistory?: number;
+  historyFraction?: number;
+  contextFraction?: number;
+  [key: string]: any;
+};
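
Note on the updateIn helper introduced in web/src/pages/playground.tsx: it walks a key
path into a nested object, mutates the value in place, and returns the same outer
object; ChatPrompts then shallow-copies the result to trigger a re-render. A minimal
usage sketch; the object and strings below are illustrative, not taken from the diff:

    const prompts = { source: { prefix: "...", suffix: "..." }, modes: { concise: "" } };
    updateIn(prompts, ["modes", "concise"], "Answer in one sentence.");
    // prompts.modes.concise is now "Answer in one sentence."; the call returned the
    // prompts object itself, so updatePrompt() wraps it as { ...updateIn(...) } to get
    // a new top-level reference for React while the nested objects are reused.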