## lsp-ai/examples/helix/llama-cpp-fim.toml
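## Example Helix configuration that runs lsp-ai against a local llama.cpp
## model with fill-in-the-middle (FIM) completion. These sections belong in
## Helix's languages.toml (typically ~/.config/helix/languages.toml).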
##############################
## Configuration for lsp-ai ##
##############################
[language-server.lsp-ai]
command = "lsp-ai"
[language-server.lsp-ai.config.memory]
file_store = { }
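## A single local model served through llama.cpp: `repository` and `name`
## identify the Hugging Face repo and GGUF file to download, and `n_ctx` is
## the context window (in tokens) the model is loaded with.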
[language-server.lsp-ai.config.models.model1]
type = "llama_cpp"
repository = "stabilityai/stable-code-3b"
name = "stable-code-3b-Q5_K_M.gguf"
n_ctx = 2048
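## Depending on your lsp-ai version, the llama_cpp backend may also accept
## keys such as `file_path` (point at an already-downloaded GGUF instead of a
## repository) and `n_gpu_layers` (offload layers to the GPU); check the
## lsp-ai configuration docs before relying on them. For example:
# file_path = "/path/to/stable-code-3b-Q5_K_M.gguf"  # assumed key, verify support
# n_gpu_layers = 35                                   # assumed key, verify support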
[language-server.lsp-ai.config.completion]
model = "model1"
[language-server.lsp-ai.config.completion.parameters]
max_tokens = 32
max_context = 1024
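## The FIM values wrap the code before and after the cursor: the prompt is
## assembled as start + prefix + middle + suffix + end, giving
## "<fim_prefix>{before}<fim_suffix>{after}<fim_middle>", the StarCoder-style
## format stable-code-3b expects. The apparent mismatch between the key names
## (middle/end) and the token names (<fim_suffix>/<fim_middle>) is intentional.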
[language-server.lsp-ai.config.completion.parameters.fim]
start = "<fim_prefix>"
middle = "<fim_suffix>"
end = "<fim_middle>"
#################################
## Configuration for languages ##
#################################
[[language]]
name = "python"
language-servers = ["pyright", "lsp-ai"]
## Configure other languages here
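## For example, a hypothetical entry for Rust (uncomment and adjust the
## server names to match your setup):
# [[language]]
# name = "rust"
# language-servers = ["rust-analyzer", "lsp-ai"]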