Mirror of https://github.com/SilasMarvin/lsp-ai.git
fix: clippy warnings
commit 19ed5188a0
parent b594d03e48
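All of the changes below are mechanical clippy cleanups: `map(...).flatten()` rewritten as `and_then`/`flat_map` (clippy's `map_flatten`), redundant `&` borrows dropped (`needless_borrow`), `len()` comparisons replaced with `is_empty()` checks (`len_zero`), unused explicit lifetimes elided (`needless_lifetimes`), and a fallible `try_into()?` replaced with the infallible `into()` it was wrapping. Short standalone Rust sketches of each pattern follow the relevant hunks; all of them use made-up stand-in types and values rather than the crate's real ones.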
@@ -37,11 +37,10 @@ impl Crawl {
         }
 
         let extension_to_match = triggered_file
-            .map(|tf| {
+            .and_then(|tf| {
                 let path = std::path::Path::new(&tf);
                 path.extension().map(|f| f.to_str().map(|f| f.to_owned()))
             })
-            .flatten()
             .flatten();
 
         if let Some(extension_to_match) = &extension_to_match {
@@ -70,7 +69,7 @@ impl Crawl {
                 }
             } else {
                 match (
-                    path.extension().map(|pe| pe.to_str()).flatten(),
+                    path.extension().and_then(|pe| pe.to_str()),
                     &extension_to_match,
                 ) {
                     (Some(path_extension), Some(extension_to_match)) => {
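Both hunks above apply clippy's `map_flatten` lint: on `Option`, `.map(f).flatten()` where `f` itself returns an `Option` is exactly `.and_then(f)`. A minimal standalone sketch of the pattern, with a made-up file name:

```rust
fn main() {
    let triggered_file: Option<String> = Some("src/main.rs".to_string());

    // Before: map produces Option<Option<&str>>, so an extra flatten is needed.
    let ext_before: Option<String> = triggered_file
        .as_deref()
        .map(|tf| std::path::Path::new(tf).extension().and_then(|e| e.to_str()))
        .flatten()
        .map(|e| e.to_owned());

    // After: and_then collapses one level of Option directly.
    let ext_after: Option<String> = triggered_file
        .as_deref()
        .and_then(|tf| std::path::Path::new(tf).extension().and_then(|e| e.to_str()))
        .map(|e| e.to_owned());

    assert_eq!(ext_before, ext_after);
    assert_eq!(ext_after.as_deref(), Some("rs"));
}
```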
@@ -328,7 +328,7 @@ impl MemoryBackend for FileStore {
         prompt_type: PromptType,
         params: &Value,
     ) -> anyhow::Result<Prompt> {
-        let params: MemoryRunParams = params.try_into()?;
+        let params: MemoryRunParams = params.into();
         self.build_code(position, prompt_type, params, true)
     }
 
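Here `params: &Value` already has an infallible `From` conversion into `MemoryRunParams`, so routing it through the blanket `TryInto` impl (whose error type is `Infallible`) and unwrapping with `?` was pure noise; recent clippy flags this as `unnecessary_fallible_conversions` (the exact lint name is an assumption). A sketch with a hypothetical stand-in for `MemoryRunParams`:

```rust
use serde_json::{json, Value};

// Hypothetical stand-in for the crate's MemoryRunParams.
struct MemoryRunParams {
    max_context: usize,
}

impl From<&Value> for MemoryRunParams {
    fn from(v: &Value) -> Self {
        Self {
            max_context: v["max_context"].as_u64().unwrap_or(1024) as usize,
        }
    }
}

fn main() {
    let params = json!({ "max_context": 2048 });
    // Since From<&Value> exists, the fallible try_into()? was unnecessary:
    let run_params: MemoryRunParams = (&params).into();
    assert_eq!(run_params.max_context, 2048);
}
```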
@@ -250,15 +250,14 @@ impl PostgresML {
                 let documents: Vec<pgml::types::Json> = chunks
                     .into_iter()
                     .zip(&file_uris)
-                    .map(|(chunks, uri)| {
+                    .flat_map(|(chunks, uri)| {
                         chunks
                             .into_iter()
                             .map(|chunk| {
-                                chunk_to_document(&uri, chunk, task_root_uri.as_deref())
+                                chunk_to_document(uri, chunk, task_root_uri.as_deref())
                             })
                             .collect::<Vec<Value>>()
                     })
-                    .flatten()
                     .map(|f: Value| f.into())
                     .collect();
                 if let Err(e) = task_collection
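The same `map_flatten` lint fires on iterators, where the fix is `flat_map`: each per-file chunk list collapses into a single stream of documents. A standalone sketch with simplified types; `chunk_to_document` here is a stand-in, not the crate's real helper:

```rust
fn chunk_to_document(uri: &str, chunk: &str) -> String {
    format!("{uri}: {chunk}")
}

fn main() {
    let chunks: Vec<Vec<&str>> = vec![vec!["fn a() {}", "fn b() {}"], vec!["fn c() {}"]];
    let file_uris = ["file:///a.rs", "file:///b.rs"];

    // flat_map replaces map(...).flatten(): the per-file Vec<String> produced
    // by the inner map is flattened into one iterator of documents.
    let documents: Vec<String> = chunks
        .into_iter()
        .zip(file_uris)
        .flat_map(|(chunks, uri)| {
            chunks
                .into_iter()
                .map(|chunk| chunk_to_document(uri, chunk))
                .collect::<Vec<String>>()
        })
        .collect();

    assert_eq!(documents.len(), 3);
}
```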
@@ -360,14 +359,10 @@ impl PostgresML {
                     current_chunks_bytes += contents.len();
                     let chunks: Vec<pgml::types::Json> = self
                         .splitter
-                        .split_file_contents(&uri, &contents)
+                        .split_file_contents(uri, &contents)
                         .into_iter()
                         .map(|chunk| {
-                            chunk_to_document(
-                                &uri,
-                                chunk,
-                                self.config.client_params.root_uri.as_deref(),
-                            )
+                            chunk_to_document(uri, chunk, self.config.client_params.root_uri.as_deref())
                                 .into()
                         })
                         .collect();
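Dropping `&uri` in favor of `uri` is clippy's `needless_borrow` lint: `uri` is already a reference, so `&uri` builds a `&&str` that deref coercion immediately unwinds again. A minimal sketch (the helper name mirrors the hunk but the body is invented):

```rust
fn split_file_contents(uri: &str, contents: &str) -> Vec<String> {
    contents.lines().map(|l| format!("{uri}:{l}")).collect()
}

fn main() {
    let uri: &str = "file:///a.rs";
    let contents = String::from("fn main() {}");

    // `uri` is already a &str; writing `&uri` would compile (a needless &&str
    // that the compiler re-derefs), but passing it directly is cleaner.
    let chunks = split_file_contents(uri, &contents);
    assert_eq!(chunks.len(), 1);
}
```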
@@ -384,7 +379,7 @@ impl PostgresML {
                 }
             }
             // Upsert any remaining chunks
-            if chunks_to_upsert.len() > 0 {
+            if !chunks_to_upsert.is_empty() {
                 collection
                     .upsert_documents(chunks_to_upsert, None)
                     .await
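The guard rewrite is clippy's `len_zero` lint: `len() > 0` is spelled `!is_empty()`, which states the intent directly. A standalone sketch of the guard (names simplified):

```rust
fn upsert_documents(batch: &[String]) {
    println!("upserting {} documents", batch.len());
}

fn main() {
    let chunks_to_upsert: Vec<String> = vec!["doc1".into()];

    // clippy::len_zero: `len() > 0` reads as an arithmetic comparison;
    // `!is_empty()` says exactly what the guard means.
    if !chunks_to_upsert.is_empty() {
        upsert_documents(&chunks_to_upsert);
    }
}
```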
@@ -474,7 +469,7 @@ impl PostgresML {
             Ok(true)
         })?;
         // Upsert any remaining documents
-        if documents.len() > 0 {
+        if !documents.is_empty() {
             let mut collection = self.collection.clone();
             TOKIO_RUNTIME.spawn(async move {
                 if let Err(e) = collection
@@ -505,7 +500,7 @@ impl MemoryBackend for PostgresML {
         prompt_type: PromptType,
         params: &Value,
     ) -> anyhow::Result<Prompt> {
-        let params: MemoryRunParams = params.try_into()?;
+        let params: MemoryRunParams = params.into();
         let chunk_size = self.splitter.chunk_size();
         let total_allowed_characters = tokens_to_estimated_characters(params.max_context);
 
@@ -530,8 +525,7 @@ impl MemoryBackend for PostgresML {
             .postgresml_config
             .embedding_model
             .as_ref()
-            .map(|m| m.query_parameters.clone())
-            .flatten()
+            .and_then(|m| m.query_parameters.clone())
        {
             Some(query_parameters) => query_parameters,
             None => json!({
@@ -597,7 +591,7 @@ impl MemoryBackend for PostgresML {
                 Prompt::ContextAndCode(ContextAndCodePrompt::new(
                     context.to_owned(),
                     format_file_excerpt(
-                        &position.text_document.uri.to_string(),
+                        position.text_document.uri.as_str(),
                         &context_and_code.code,
                         self.config.client_params.root_uri.as_deref(),
                     ),
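`&position.text_document.uri.to_string()` allocated a fresh `String` only to pass it by reference; the URI type already exposes `as_str()`, which borrows without allocating (clippy reports this family under `unnecessary_to_owned`, an assumption about the exact lint). A sketch with a plain `String` standing in for the LSP `Url`:

```rust
fn format_file_excerpt(uri: &str, code: &str) -> String {
    format!("// {uri}\n{code}")
}

fn main() {
    // Stand-in for position.text_document.uri.
    let uri = String::from("file:///src/main.rs");

    // Before: &uri.to_string() cloned the buffer just to borrow the clone.
    // After: uri.as_str() borrows the existing buffer with no allocation.
    let excerpt = format_file_excerpt(uri.as_str(), "fn main() {}");
    assert!(excerpt.starts_with("// file:///src/main.rs"));
}
```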
@@ -338,7 +338,7 @@ async fn do_completion(
     let mut response = transformer_backend.do_completion(&prompt, params).await?;
 
     if let Some(post_process) = config.get_completions_post_process() {
-        response.insert_text = post_process_response(response.insert_text, &prompt, &post_process);
+        response.insert_text = post_process_response(response.insert_text, &prompt, post_process);
     }
 
     // Build and send the response
@@ -65,6 +65,6 @@ pub fn parse_tree(uri: &str, contents: &str, old_tree: Option<&Tree>) -> anyhow:
     let extension = extension.as_deref().unwrap_or("");
     let mut parser = utils_tree_sitter::get_parser_for_extension(extension)?;
     parser
-        .parse(&contents, old_tree)
+        .parse(contents, old_tree)
         .with_context(|| format!("parsing tree failed for {uri}"))
 }
@@ -55,11 +55,7 @@ impl TreeSitterCodeSplitter {
         }
     }
 
-    pub fn split<'a, 'b, 'c>(
-        &'a self,
-        tree: &'b Tree,
-        utf8: &'c [u8],
-    ) -> Result<Vec<Chunk<'c>>, SplitError> {
+    pub fn split<'c>(&self, tree: &Tree, utf8: &'c [u8]) -> Result<Vec<Chunk<'c>>, SplitError> {
         let cursor = tree.walk();
         Ok(self
             .split_recursive(cursor, utf8)?
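The old `split<'a, 'b, 'c>` named three lifetimes, but only `'c` matters: the returned `Chunk`s borrow from `utf8`, while `'a` and `'b` are fully elidable, which is clippy's `needless_lifetimes`. A reduced sketch with a stand-in `Chunk` type and the `Tree` parameter left out:

```rust
// Stand-in for the splitter's Chunk: borrows text out of the input bytes.
struct Chunk<'c> {
    text: &'c str,
}

struct Splitter;

impl Splitter {
    // Before: fn split<'a, 'b, 'c>(&'a self, tree: &'b Tree, utf8: &'c [u8]) -> ...
    // 'a and 'b never appear in the output, so elision handles them; only 'c
    // must be named, because the returned chunks borrow from `utf8`.
    fn split<'c>(&self, utf8: &'c [u8]) -> Vec<Chunk<'c>> {
        std::str::from_utf8(utf8)
            .into_iter()
            .flat_map(|s| s.split('\n'))
            .map(|text| Chunk { text })
            .collect()
    }
}

fn main() {
    let splitter = Splitter;
    let chunks = splitter.split(b"fn a() {}\nfn b() {}");
    assert_eq!(chunks.len(), 2);
}
```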
@@ -68,7 +64,7 @@ impl TreeSitterCodeSplitter {
             // Let's combine some of our smaller chunks together
             // We also want to do this in reverse as it (seems) to make more sense to combine code slices from bottom to top
             .try_fold(vec![], |mut acc, current| {
-                if acc.len() == 0 {
+                if acc.is_empty() {
                     acc.push(current);
                     Ok::<_, SplitError>(acc)
                 } else {
@@ -94,9 +90,9 @@ impl TreeSitterCodeSplitter {
         .collect())
     }
 
-    fn split_recursive<'a, 'b, 'c>(
-        &'a self,
-        mut cursor: TreeCursor<'b>,
+    fn split_recursive<'c>(
+        &self,
+        mut cursor: TreeCursor<'_>,
         utf8: &'c [u8],
     ) -> Result<Vec<Chunk<'c>>, SplitError> {
         let node = cursor.node();