feat(openai): 16k

This commit is contained in:
Stan Girard 2023-06-16 17:52:32 +02:00
parent 377afe61ab
commit f833de47bc
8 changed files with 10 additions and 11 deletions

View File

@@ -27,7 +27,7 @@ class CustomSupabaseVectorStore(SupabaseVectorStore):
query: str,
user_id: str = "none",
table: str = "match_vectors",
k: int = 4,
k: int = 8,
threshold: float = 0.5,
**kwargs: Any
) -> List[Document]:

View File

@@ -2,7 +2,6 @@ import os
import guidance
import openai
from logger import get_logger
logger = get_logger(__name__)

View File

@@ -1,11 +1,11 @@
from typing import List, Tuple, Optional
from typing import List, Optional, Tuple
from uuid import UUID
from pydantic import BaseModel
class ChatMessage(BaseModel):
model: str = "gpt-3.5-turbo-0613"
model: str = "gpt-3.5-turbo-16k"
question: str
# A list of tuples where each tuple is (speaker, text)
history: List[Tuple[str, str]]

View File

@@ -58,7 +58,7 @@ async def process_audio(upload_file: UploadFile, enable_summarization: bool, use
file_size = len(transcript.text.encode("utf-8"))
# Load chunk size and overlap from sidebar
chunk_size = 500
chunk_size = 250
chunk_overlap = 0
text_splitter = RecursiveCharacterTextSplitter.from_tiktoken_encoder(

View File

@@ -31,7 +31,7 @@ async def process_file(file: UploadFile, loader_class, file_suffix, enable_summa
file_sha1 = compute_sha1_from_file(tmp_file.name)
os.remove(tmp_file.name)
chunk_size = 500
chunk_size = 250
chunk_overlap = 0
text_splitter = RecursiveCharacterTextSplitter.from_tiktoken_encoder(

View File

@@ -22,7 +22,7 @@ async def process_github(repo, enable_summarization, user, supabase, user_openai
documents = loader.load()
os.system("rm -rf /tmp/" + random_dir_name)
chunk_size = 500
chunk_size = 250
chunk_overlap = 0
text_splitter = RecursiveCharacterTextSplitter.from_tiktoken_encoder(
chunk_size=chunk_size, chunk_overlap=chunk_overlap)

View File

@@ -16,9 +16,9 @@ export const BrainConfigContext = createContext<ConfigContext | undefined>(
);
const defaultBrainConfig: BrainConfig = {
model: "gpt-3.5-turbo-0613",
model: "gpt-3.5-turbo-16k",
temperature: 0,
maxTokens: 500,
maxTokens: 1000,
keepLocal: true,
anthropicKey: undefined,
backendUrl: undefined,

View File

@@ -21,14 +21,14 @@ export type ConfigContext = {
// export const openAiModels = ["gpt-3.5-turbo", "gpt-4"] as const; ## TODO activate GPT4 when not in demo mode
export const openAiModels = [
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
] as const;
export const openAiPaidModels = [
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-4",
"gpt-4-0613",
] as const;