diff --git a/crates/lsp-ai/src/transformer_backends/open_ai/mod.rs b/crates/lsp-ai/src/transformer_backends/open_ai/mod.rs
index dd9c98e..d516adf 100644
--- a/crates/lsp-ai/src/transformer_backends/open_ai/mod.rs
+++ b/crates/lsp-ai/src/transformer_backends/open_ai/mod.rs
@@ -156,12 +156,6 @@ impl OpenAI {
         messages: Vec,
         params: OpenAIRunParams,
     ) -> anyhow::Result {
-        eprintln!("\n\n\n\n");
-        for message in &messages {
-            eprintln!("{}:\n{}\n", message.role.to_string(), message.content);
-        }
-        eprintln!("\n\n\n\n");
-
         let client = reqwest::Client::new();
         let token = self.get_token()?;
         let res: OpenAIChatResponse = client
diff --git a/crates/lsp-ai/src/transformer_worker.rs b/crates/lsp-ai/src/transformer_worker.rs
index aff089f..3597aba 100644
--- a/crates/lsp-ai/src/transformer_worker.rs
+++ b/crates/lsp-ai/src/transformer_worker.rs
@@ -342,7 +342,6 @@ async fn do_completion(
     // Get the response
     let mut response = transformer_backend.do_completion(&prompt, params).await?;
-    eprintln!("\n\n\n\nGOT RESPONSE: {}\n\n\n\n", response.insert_text);
     if let Some(post_process) = config.get_completions_post_process() {
         response.insert_text =
             post_process_response(response.insert_text, &prompt, &post_process);