diff --git a/crates/lsp-ai/src/crawl.rs b/crates/lsp-ai/src/crawl.rs
index 4546d2c..d4634b8 100644
--- a/crates/lsp-ai/src/crawl.rs
+++ b/crates/lsp-ai/src/crawl.rs
@@ -37,11 +37,10 @@ impl Crawl {
             }
 
             let extension_to_match = triggered_file
-                .map(|tf| {
+                .and_then(|tf| {
                     let path = std::path::Path::new(&tf);
                     path.extension().map(|f| f.to_str().map(|f| f.to_owned()))
                 })
-                .flatten()
                 .flatten();
 
             if let Some(extension_to_match) = &extension_to_match {
@@ -70,7 +69,7 @@ impl Crawl {
                             }
                         } else {
                             match (
-                                path.extension().map(|pe| pe.to_str()).flatten(),
+                                path.extension().and_then(|pe| pe.to_str()),
                                 &extension_to_match,
                             ) {
                                 (Some(path_extension), Some(extension_to_match)) => {
diff --git a/crates/lsp-ai/src/memory_backends/file_store.rs b/crates/lsp-ai/src/memory_backends/file_store.rs
index f37ff58..51f3c58 100644
--- a/crates/lsp-ai/src/memory_backends/file_store.rs
+++ b/crates/lsp-ai/src/memory_backends/file_store.rs
@@ -328,7 +328,7 @@ impl MemoryBackend for FileStore {
         prompt_type: PromptType,
         params: &Value,
     ) -> anyhow::Result<Prompt> {
-        let params: MemoryRunParams = params.try_into()?;
+        let params: MemoryRunParams = params.into();
         self.build_code(position, prompt_type, params, true)
     }
 
diff --git a/crates/lsp-ai/src/memory_backends/postgresml/mod.rs b/crates/lsp-ai/src/memory_backends/postgresml/mod.rs
index 255350d..010c052 100644
--- a/crates/lsp-ai/src/memory_backends/postgresml/mod.rs
+++ b/crates/lsp-ai/src/memory_backends/postgresml/mod.rs
@@ -250,15 +250,14 @@ impl PostgresML {
                         let documents: Vec<pgml::types::Json> = chunks
                             .into_iter()
                             .zip(&file_uris)
-                            .map(|(chunks, uri)| {
+                            .flat_map(|(chunks, uri)| {
                                 chunks
                                     .into_iter()
                                     .map(|chunk| {
-                                        chunk_to_document(&uri, chunk, task_root_uri.as_deref())
+                                        chunk_to_document(uri, chunk, task_root_uri.as_deref())
                                     })
                                     .collect::<Vec<Value>>()
                             })
-                            .flatten()
                             .map(|f: Value| f.into())
                             .collect();
                         if let Err(e) = task_collection
@@ -360,15 +359,11 @@ impl PostgresML {
             current_chunks_bytes += contents.len();
             let chunks: Vec<pgml::types::Json> = self
                 .splitter
-                .split_file_contents(&uri, &contents)
+                .split_file_contents(uri, &contents)
                 .into_iter()
                 .map(|chunk| {
-                    chunk_to_document(
-                        &uri,
-                        chunk,
-                        self.config.client_params.root_uri.as_deref(),
-                    )
-                    .into()
+                    chunk_to_document(uri, chunk, self.config.client_params.root_uri.as_deref())
+                        .into()
                 })
                 .collect();
             chunks_to_upsert.extend(chunks);
@@ -384,7 +379,7 @@ impl PostgresML {
             }
         }
         // Upsert any remaining chunks
-        if chunks_to_upsert.len() > 0 {
+        if !chunks_to_upsert.is_empty() {
             collection
                 .upsert_documents(chunks_to_upsert, None)
                 .await
@@ -474,7 +469,7 @@ impl PostgresML {
             Ok(true)
         })?;
         // Upsert any remaining documents
-        if documents.len() > 0 {
+        if !documents.is_empty() {
            let mut collection = self.collection.clone();
            TOKIO_RUNTIME.spawn(async move {
                if let Err(e) = collection
@@ -505,7 +500,7 @@ impl MemoryBackend for PostgresML {
         prompt_type: PromptType,
         params: &Value,
     ) -> anyhow::Result<Prompt> {
-        let params: MemoryRunParams = params.try_into()?;
+        let params: MemoryRunParams = params.into();
         let chunk_size = self.splitter.chunk_size();
         let total_allowed_characters = tokens_to_estimated_characters(params.max_context);
 
@@ -530,8 +525,7 @@ impl MemoryBackend for PostgresML {
             .postgresml_config
             .embedding_model
             .as_ref()
-            .map(|m| m.query_parameters.clone())
-            .flatten()
+            .and_then(|m| m.query_parameters.clone())
         {
             Some(query_parameters) => query_parameters,
             None => json!({
@@ -597,7 +591,7 @@ impl MemoryBackend for PostgresML {
                 Prompt::ContextAndCode(ContextAndCodePrompt::new(
                     context.to_owned(),
                     format_file_excerpt(
-                        &position.text_document.uri.to_string(),
+                        position.text_document.uri.as_str(),
                         &context_and_code.code,
                         self.config.client_params.root_uri.as_deref(),
                     ),
diff --git a/crates/lsp-ai/src/transformer_worker.rs b/crates/lsp-ai/src/transformer_worker.rs
index 7766a11..f7ab8e9 100644
--- a/crates/lsp-ai/src/transformer_worker.rs
+++ b/crates/lsp-ai/src/transformer_worker.rs
@@ -338,7 +338,7 @@ async fn do_completion(
     let mut response = transformer_backend.do_completion(&prompt, params).await?;
 
     if let Some(post_process) = config.get_completions_post_process() {
-        response.insert_text = post_process_response(response.insert_text, &prompt, &post_process);
+        response.insert_text = post_process_response(response.insert_text, &prompt, post_process);
     }
 
     // Build and send the response
diff --git a/crates/lsp-ai/src/utils.rs b/crates/lsp-ai/src/utils.rs
index 8b5b8b4..1178060 100644
--- a/crates/lsp-ai/src/utils.rs
+++ b/crates/lsp-ai/src/utils.rs
@@ -65,6 +65,6 @@ pub fn parse_tree(uri: &str, contents: &str, old_tree: Option<&Tree>) -> anyhow:
     let extension = extension.as_deref().unwrap_or("");
     let mut parser = utils_tree_sitter::get_parser_for_extension(extension)?;
     parser
-        .parse(&contents, old_tree)
+        .parse(contents, old_tree)
         .with_context(|| format!("parsing tree failed for {uri}"))
 }
diff --git a/crates/splitter-tree-sitter/src/lib.rs b/crates/splitter-tree-sitter/src/lib.rs
index 49bf1a0..52e30ed 100644
--- a/crates/splitter-tree-sitter/src/lib.rs
+++ b/crates/splitter-tree-sitter/src/lib.rs
@@ -55,11 +55,7 @@ impl TreeSitterCodeSplitter {
         }
     }
 
-    pub fn split<'a, 'b, 'c>(
-        &'a self,
-        tree: &'b Tree,
-        utf8: &'c [u8],
-    ) -> Result<Vec<Chunk<'c>>, SplitError> {
+    pub fn split<'c>(&self, tree: &Tree, utf8: &'c [u8]) -> Result<Vec<Chunk<'c>>, SplitError> {
         let cursor = tree.walk();
         Ok(self
             .split_recursive(cursor, utf8)?
@@ -68,7 +64,7 @@ impl TreeSitterCodeSplitter {
             // Let's combine some of our smaller chunks together
             // We also want to do this in reverse as it (seems) to make more sense to combine code slices from bottom to top
             .try_fold(vec![], |mut acc, current| {
-                if acc.len() == 0 {
+                if acc.is_empty() {
                     acc.push(current);
                     Ok::<_, SplitError>(acc)
                 } else {
@@ -94,9 +90,9 @@ impl TreeSitterCodeSplitter {
             .collect())
     }
 
-    fn split_recursive<'a, 'b, 'c>(
-        &'a self,
-        mut cursor: TreeCursor<'b>,
+    fn split_recursive<'c>(
+        &self,
+        mut cursor: TreeCursor<'_>,
         utf8: &'c [u8],
     ) -> Result<Vec<Chunk<'c>>, SplitError> {
         let node = cursor.node();
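
Note (illustration only, not part of the patch): the edits above are mechanical rewrites of the kind clippy suggests. Option::map(..).flatten() becomes Option::and_then(..), Iterator::map(..).flatten() becomes flat_map(..), and len() > 0 / len() == 0 comparisons become !is_empty() / is_empty(). A minimal, self-contained sketch of the same idioms, using made-up data rather than the lsp-ai types:

fn main() {
    // Option: map(..).flatten() is equivalent to and_then(..).
    let file = Some("src/lib.rs");
    let ext_via_map: Option<&str> = file
        .map(|f| std::path::Path::new(f).extension().and_then(|e| e.to_str()))
        .flatten();
    let ext_via_and_then: Option<&str> =
        file.and_then(|f| std::path::Path::new(f).extension().and_then(|e| e.to_str()));
    assert_eq!(ext_via_map, ext_via_and_then);

    // Iterator: map(..).flatten() over nested collections is equivalent to flat_map(..).
    let nested = vec![vec![1, 2], vec![3]];
    let flat: Vec<i32> = nested.iter().flat_map(|v| v.iter().copied()).collect();
    assert_eq!(flat, vec![1, 2, 3]);

    // len() > 0 reads better as !is_empty(); the negation has to be preserved.
    if !flat.is_empty() {
        println!("upserting {} items", flat.len());
    }
}

The remaining borrow-related changes (dropping needless & and replacing uri.to_string() with uri.as_str()) follow the same pattern: pass the reference the callee already accepts instead of allocating or re-borrowing.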