From dd0b4eccf1fd9d440b6c066b23b97ff839ec10e6 Mon Sep 17 00:00:00 2001
From: Caleb Owens
Date: Thu, 27 Jun 2024 21:50:44 +0200
Subject: [PATCH] Improve ai error handling (#4180)

* Introduce a result type
* Update AI error handling to use the result type
* Handle ollama json parse error
* Migrate using Error as the type that represents errors
* Remove now useless condition
* asdfasdf
* Use andThen
* Correct unit tests
---
 app/src/lib/ai/anthropicClient.ts            | 18 +++-
 app/src/lib/ai/butlerClient.ts               | 24 +++--
 app/src/lib/ai/ollamaClient.ts               | 55 ++++++-----
 app/src/lib/ai/openAIClient.ts               | 22 +++--
 app/src/lib/ai/service.test.ts               | 88 +++++++++--------
 app/src/lib/ai/service.ts                    | 56 ++++++-----
 app/src/lib/ai/types.ts                      |  3 +-
 app/src/lib/backend/httpClient.ts            | 21 +++++
 app/src/lib/branch/BranchCard.svelte         | 33 ++++---
 app/src/lib/commit/CommitMessageInput.svelte | 45 +++++----
 app/src/lib/result.ts                        | 99 ++++++++++++++++++++
 11 files changed, 329 insertions(+), 135 deletions(-)
 create mode 100644 app/src/lib/result.ts

diff --git a/app/src/lib/ai/anthropicClient.ts b/app/src/lib/ai/anthropicClient.ts
index f68532ef9..d8d3459c8 100644
--- a/app/src/lib/ai/anthropicClient.ts
+++ b/app/src/lib/ai/anthropicClient.ts
@@ -1,8 +1,12 @@
 import { SHORT_DEFAULT_COMMIT_TEMPLATE, SHORT_DEFAULT_BRANCH_TEMPLATE } from '$lib/ai/prompts';
+import { type AIClient, type AnthropicModelName, type Prompt } from '$lib/ai/types';
+import { buildFailureFromAny, ok, type Result } from '$lib/result';
 import { fetch, Body } from '@tauri-apps/api/http';
-import type { AIClient, AnthropicModelName, Prompt } from '$lib/ai/types';
 
-type AnthropicAPIResponse = { content: { text: string }[] };
+type AnthropicAPIResponse = {
+	content: { text: string }[];
+	error: { type: string; message: string };
+};
 
 export class AnthropicAIClient implements AIClient {
 	defaultCommitTemplate = SHORT_DEFAULT_COMMIT_TEMPLATE;
@@ -13,7 +17,7 @@ export class AnthropicAIClient implements AIClient {
 		private modelName: AnthropicModelName
 	) {}
 
-	async evaluate(prompt: Prompt) {
+	async evaluate(prompt: Prompt): Promise<Result<string, Error>> {
 		const body = Body.json({
 			messages: prompt,
 			max_tokens: 1024,
@@ -30,6 +34,12 @@ export class AnthropicAIClient implements AIClient {
 			body
 		});
 
-		return response.data.content[0].text;
+		if (response.ok && response.data?.content?.[0]?.text) {
+			return ok(response.data.content[0].text);
+		} else {
+			return buildFailureFromAny(
+				`Anthropic returned error code ${response.status} ${response.data?.error?.message}`
+			);
+		}
 	}
 }
diff --git a/app/src/lib/ai/butlerClient.ts b/app/src/lib/ai/butlerClient.ts
index 56454ef24..d6d4f8211 100644
--- a/app/src/lib/ai/butlerClient.ts
+++ b/app/src/lib/ai/butlerClient.ts
@@ -1,4 +1,5 @@
 import { SHORT_DEFAULT_BRANCH_TEMPLATE, SHORT_DEFAULT_COMMIT_TEMPLATE } from '$lib/ai/prompts';
+import { map, type Result } from '$lib/result';
 import type { AIClient, ModelKind, Prompt } from '$lib/ai/types';
 import type { HttpClient } from '$lib/backend/httpClient';
 
@@ -12,16 +13,19 @@ export class ButlerAIClient implements AIClient {
 		private modelKind: ModelKind
 	) {}
 
-	async evaluate(prompt: Prompt) {
-		const response = await this.cloud.post<{ message: string }>('evaluate_prompt/predict.json', {
-			body: {
-				messages: prompt,
-				max_tokens: 400,
-				model_kind: this.modelKind
-			},
-			token: this.userToken
-		});
+	async evaluate(prompt: Prompt): Promise<Result<string, Error>> {
+		const response = await this.cloud.postSafe<{ message: string }>(
+			'evaluate_prompt/predict.json',
+			{
+				body: {
+					messages: prompt,
+					max_tokens: 400,
+					model_kind: this.modelKind
+				},
+				token: this.userToken
+			}
+		);
 
-		return response.message;
+		return map(response, ({ message }) => message);
 	}
 }
diff --git a/app/src/lib/ai/ollamaClient.ts b/app/src/lib/ai/ollamaClient.ts
index a9653b92f..bb1134d6c 100644
--- a/app/src/lib/ai/ollamaClient.ts
+++ b/app/src/lib/ai/ollamaClient.ts
@@ -1,5 +1,6 @@
 import { LONG_DEFAULT_BRANCH_TEMPLATE, LONG_DEFAULT_COMMIT_TEMPLATE } from '$lib/ai/prompts';
 import { MessageRole, type PromptMessage, type AIClient, type Prompt } from '$lib/ai/types';
+import { andThen, buildFailureFromAny, ok, wrap, wrapAsync, type Result } from '$lib/result';
 import { isNonEmptyObject } from '$lib/utils/typeguards';
 import { fetch, Body, Response } from '@tauri-apps/api/http';
 
@@ -81,15 +82,22 @@ export class OllamaClient implements AIClient {
 		private modelName: string
 	) {}
 
-	async evaluate(prompt: Prompt) {
+	async evaluate(prompt: Prompt): Promise<Result<string, Error>> {
 		const messages = this.formatPrompt(prompt);
-		const response = await this.chat(messages);
-		const rawResponse = JSON.parse(response.message.content);
-		if (!isOllamaChatMessageFormat(rawResponse)) {
-			throw new Error('Invalid response: ' + response.message.content);
-		}
 
-		return rawResponse.result;
+		const responseResult = await this.chat(messages);
+
+		return andThen(responseResult, (response) => {
+			const rawResponseResult = wrap(() => JSON.parse(response.message.content));
+
+			return andThen(rawResponseResult, (rawResponse) => {
+				if (!isOllamaChatMessageFormat(rawResponse)) {
+					return buildFailureFromAny('Invalid response: ' + response.message.content);
+				}
+
+				return ok(rawResponse.result);
+			});
+		});
 	}
 
 	/**
@@ -124,17 +132,19 @@ ${JSON.stringify(OLLAMA_CHAT_MESSAGE_FORMAT_SCHEMA, null, 2)}`
 	 * @param request - The OllamaChatRequest object containing the request details.
 	 * @returns A Promise that resolves to the Response object.
 	 */
-	private async fetchChat(request: OllamaChatRequest): Promise<Response<any>> {
+	private async fetchChat(request: OllamaChatRequest): Promise<Result<Response<any>, Error>> {
 		const url = new URL(OllamaAPEndpoint.Chat, this.endpoint);
 		const body = Body.json(request);
-		const result = await fetch(url.toString(), {
-			method: 'POST',
-			headers: {
-				'Content-Type': 'application/json'
-			},
-			body
-		});
-		return result;
+		return await wrapAsync(
+			async () =>
+				await fetch(url.toString(), {
+					method: 'POST',
+					headers: {
+						'Content-Type': 'application/json'
+					},
+					body
+				})
+		);
 	}
 
 	/**
@@ -142,13 +152,12 @@ ${JSON.stringify(OLLAMA_CHAT_MESSAGE_FORMAT_SCHEMA, null, 2)}`
 	 *
 	 * @param messages - An array of LLMChatMessage objects representing the chat messages.
 	 * @param options - Optional LLMRequestOptions object for specifying additional options.
-	 * @throws Error if the response is invalid.
 	 * @returns A Promise that resolves to an LLMResponse object representing the response from the LLM model.
 	 */
 	private async chat(
 		messages: Prompt,
 		options?: OllamaRequestOptions
-	): Promise<OllamaChatResponse> {
+	): Promise<Result<OllamaChatResponse, Error>> {
 		const result = await this.fetchChat({
 			model: this.modelName,
 			stream: false,
@@ -157,10 +166,12 @@ ${JSON.stringify(OLLAMA_CHAT_MESSAGE_FORMAT_SCHEMA, null, 2)}`
 			format: 'json'
 		});
 
-		if (!isOllamaChatResponse(result.data)) {
-			throw new Error('Invalid response\n' + JSON.stringify(result.data));
-		}
+		return andThen(result, (result) => {
+			if (!isOllamaChatResponse(result.data)) {
+				return buildFailureFromAny('Invalid response\n' + JSON.stringify(result.data));
+			}
 
-		return result.data;
+			return ok(result.data);
+		});
 	}
 }
diff --git a/app/src/lib/ai/openAIClient.ts b/app/src/lib/ai/openAIClient.ts
index 09ea39d88..757b977b1 100644
--- a/app/src/lib/ai/openAIClient.ts
+++ b/app/src/lib/ai/openAIClient.ts
@@ -1,6 +1,8 @@
 import { SHORT_DEFAULT_BRANCH_TEMPLATE, SHORT_DEFAULT_COMMIT_TEMPLATE } from '$lib/ai/prompts';
+import { andThen, buildFailureFromAny, ok, wrapAsync, type Result } from '$lib/result';
 import type { OpenAIModelName, Prompt, AIClient } from '$lib/ai/types';
 import type OpenAI from 'openai';
+import type { ChatCompletion } from 'openai/resources/index.mjs';
 
 export class OpenAIClient implements AIClient {
 	defaultCommitTemplate = SHORT_DEFAULT_COMMIT_TEMPLATE;
@@ -11,13 +13,21 @@ export class OpenAIClient implements AIClient {
 		private openAI: OpenAI
 	) {}
 
-	async evaluate(prompt: Prompt) {
-		const response = await this.openAI.chat.completions.create({
-			messages: prompt,
-			model: this.modelName,
-			max_tokens: 400
+	async evaluate(prompt: Prompt): Promise<Result<string, Error>> {
+		const responseResult = await wrapAsync<ChatCompletion, Error>(async () => {
+			return await this.openAI.chat.completions.create({
+				messages: prompt,
+				model: this.modelName,
+				max_tokens: 400
+			});
 		});
 
-		return response.choices[0].message.content || '';
+		return andThen(responseResult, (response) => {
+			if (response.choices[0]?.message.content) {
+				return ok(response.choices[0]?.message.content);
+			} else {
+				return buildFailureFromAny('Open AI generated an empty message');
+			}
+		});
 	}
 }
diff --git a/app/src/lib/ai/service.test.ts b/app/src/lib/ai/service.test.ts
index f25eea65d..6fb58f0f7 100644
--- a/app/src/lib/ai/service.test.ts
+++ b/app/src/lib/ai/service.test.ts
@@ -11,7 +11,7 @@ import {
 	type Prompt
 } from '$lib/ai/types';
 import { HttpClient } from '$lib/backend/httpClient';
-import * as toasts from '$lib/utils/toasts';
+import { buildFailureFromAny, ok, unwrap, type Result } from '$lib/result';
 import { Hunk } from '$lib/vbranches/types';
 import { plainToInstance } from 'class-transformer';
 import { expect, test, describe, vi } from 'vitest';
@@ -56,8 +56,8 @@ class DummyAIClient implements AIClient {
 	defaultBranchTemplate = SHORT_DEFAULT_BRANCH_TEMPLATE;
 	constructor(private response = 'lorem ipsum') {}
 
-	async evaluate(_prompt: Prompt) {
-		return this.response;
+	async evaluate(_prompt: Prompt): Promise<Result<string, Error>> {
+		return ok(this.response);
 	}
 }
 
@@ -116,16 +116,14 @@ describe.concurrent('AIService', () => {
 	test('With default configuration, When a user token is provided. It returns ButlerAIClient', async () => {
 		const aiService = buildDefaultAIService();
 
-		expect(await aiService.buildClient('token')).toBeInstanceOf(ButlerAIClient);
+		expect(unwrap(await aiService.buildClient('token'))).toBeInstanceOf(ButlerAIClient);
 	});
 
 	test('With default configuration, When a user is undefined. It returns undefined', async () => {
-		const toastErrorSpy = vi.spyOn(toasts, 'error');
 		const aiService = buildDefaultAIService();
 
-		expect(await aiService.buildClient()).toBe(undefined);
-		expect(toastErrorSpy).toHaveBeenLastCalledWith(
-			"When using GitButler's API to summarize code, you must be logged in"
+		expect(await aiService.buildClient()).toStrictEqual(
+			buildFailureFromAny("When using GitButler's API to summarize code, you must be logged in")
 		);
 	});
 
@@ -137,11 +135,10 @@ describe.concurrent('AIService', () => {
 		});
 		const aiService = new AIService(gitConfig, cloud);
 
-		expect(await aiService.buildClient()).toBeInstanceOf(OpenAIClient);
+		expect(unwrap(await aiService.buildClient())).toBeInstanceOf(OpenAIClient);
 	});
 
 	test('When token is bring your own, When a openAI token is blank. It returns undefined', async () => {
-		const toastErrorSpy = vi.spyOn(toasts, 'error');
 		const gitConfig = new DummyGitConfigService({
 			...defaultGitConfig,
 			[GitAIConfigKey.OpenAIKeyOption]: KeyOption.BringYourOwn,
@@ -149,9 +146,10 @@ describe.concurrent('AIService', () => {
 		});
 		const aiService = new AIService(gitConfig, cloud);
 
-		expect(await aiService.buildClient()).toBe(undefined);
-		expect(toastErrorSpy).toHaveBeenLastCalledWith(
-			'When using OpenAI in a bring your own key configuration, you must provide a valid token'
+		expect(await aiService.buildClient()).toStrictEqual(
+			buildFailureFromAny(
+				'When using OpenAI in a bring your own key configuration, you must provide a valid token'
+			)
 		);
 	});
 
@@ -164,11 +162,10 @@ describe.concurrent('AIService', () => {
 		});
 		const aiService = new AIService(gitConfig, cloud);
 
-		expect(await aiService.buildClient()).toBeInstanceOf(AnthropicAIClient);
+		expect(unwrap(await aiService.buildClient())).toBeInstanceOf(AnthropicAIClient);
 	});
 
 	test('When ai provider is Anthropic, When token is bring your own, When an anthropic token is blank. It returns undefined', async () => {
-		const toastErrorSpy = vi.spyOn(toasts, 'error');
 		const gitConfig = new DummyGitConfigService({
 			...defaultGitConfig,
 			[GitAIConfigKey.ModelProvider]: ModelKind.Anthropic,
@@ -177,9 +174,10 @@ describe.concurrent('AIService', () => {
 		});
 		const aiService = new AIService(gitConfig, cloud);
 
-		expect(await aiService.buildClient()).toBe(undefined);
-		expect(toastErrorSpy).toHaveBeenLastCalledWith(
-			'When using Anthropic in a bring your own key configuration, you must provide a valid token'
+		expect(await aiService.buildClient()).toStrictEqual(
+			buildFailureFromAny(
+				'When using Anthropic in a bring your own key configuration, you must provide a valid token'
+			)
 		);
 	});
 });
 
@@ -188,9 +186,13 @@ describe.concurrent('AIService', () => {
 	test('When buildModel returns undefined, it returns undefined', async () => {
 		const aiService = buildDefaultAIService();
 
-		vi.spyOn(aiService, 'buildClient').mockReturnValue((async () => undefined)());
+		vi.spyOn(aiService, 'buildClient').mockReturnValue(
+			(async () => buildFailureFromAny('Failed to build'))()
+		);
 
-		expect(await aiService.summarizeCommit({ hunks: exampleHunks })).toBe(undefined);
+		expect(await aiService.summarizeCommit({ hunks: exampleHunks })).toStrictEqual(
+			buildFailureFromAny('Failed to build')
+		);
 	});
 
 	test('When the AI returns a single line commit message, it returns it unchanged', async () => {
@@ -199,10 +201,12 @@ describe.concurrent('AIService', () => {
 		const clientResponse = 'single line commit';
 
 		vi.spyOn(aiService, 'buildClient').mockReturnValue(
-			(async () => new DummyAIClient(clientResponse))()
+			(async () => ok(new DummyAIClient(clientResponse)))()
 		);
 
-		expect(await aiService.summarizeCommit({ hunks: exampleHunks })).toBe('single line commit');
+		expect(await aiService.summarizeCommit({ hunks: exampleHunks })).toStrictEqual(
+			ok('single line commit')
+		);
 	});
 
 	test('When the AI returns a title and body that is split by a single new line, it replaces it with two', async () => {
@@ -211,10 +215,12 @@ describe.concurrent('AIService', () => {
 		const clientResponse = 'one\nnew line';
 
 		vi.spyOn(aiService, 'buildClient').mockReturnValue(
-			(async () => new DummyAIClient(clientResponse))()
+			(async () => ok(new DummyAIClient(clientResponse)))()
 		);
 
-		expect(await aiService.summarizeCommit({ hunks: exampleHunks })).toBe('one\n\nnew line');
+		expect(await aiService.summarizeCommit({ hunks: exampleHunks })).toStrictEqual(
+			ok('one\n\nnew line')
+		);
 	});
 
 	test('When the commit is in brief mode, When the AI returns a title and body, it takes just the title', async () => {
@@ -223,12 +229,12 @@ describe.concurrent('AIService', () => {
 		const clientResponse = 'one\nnew line';
 
 		vi.spyOn(aiService, 'buildClient').mockReturnValue(
-			(async () => new DummyAIClient(clientResponse))()
+			(async () => ok(new DummyAIClient(clientResponse)))()
 		);
 
-		expect(await aiService.summarizeCommit({ hunks: exampleHunks, useBriefStyle: true })).toBe(
-			'one'
-		);
+		expect(
+			await aiService.summarizeCommit({ hunks: exampleHunks, useBriefStyle: true })
+		).toStrictEqual(ok('one'));
 	});
 });
 
@@ -236,9 +242,13 @@ describe.concurrent('AIService', () => {
 	test('When buildModel returns undefined, it returns undefined', async () => {
 		const aiService = buildDefaultAIService();
 
-		vi.spyOn(aiService, 'buildClient').mockReturnValue((async () => undefined)());
+		vi.spyOn(aiService, 'buildClient').mockReturnValue(
+			(async () => buildFailureFromAny('Failed to build client'))()
+		);
 
-		expect(await aiService.summarizeBranch({ hunks: exampleHunks })).toBe(undefined);
+		expect(await aiService.summarizeBranch({ hunks: exampleHunks })).toStrictEqual(
+			buildFailureFromAny('Failed to build client')
+		);
 	});
 
 	test('When the AI client returns a string with spaces, it replaces them with hypens', async () => {
@@ -247,10 +257,12 @@ describe.concurrent('AIService', () => {
 		const clientResponse = 'with spaces included';
 
 		vi.spyOn(aiService, 'buildClient').mockReturnValue(
-			(async () => new DummyAIClient(clientResponse))()
+			(async () => ok(new DummyAIClient(clientResponse)))()
 		);
 
-		expect(await aiService.summarizeBranch({ hunks: exampleHunks })).toBe('with-spaces-included');
+		expect(await aiService.summarizeBranch({ hunks: exampleHunks })).toStrictEqual(
+			ok('with-spaces-included')
+		);
 	});
 
 	test('When the AI client returns multiple lines, it replaces them with hypens', async () => {
@@ -259,11 +271,11 @@ describe.concurrent('AIService', () => {
 		const clientResponse = 'with\nnew\nlines\nincluded';
 
 		vi.spyOn(aiService, 'buildClient').mockReturnValue(
-			(async () => new DummyAIClient(clientResponse))()
+			(async () => ok(new DummyAIClient(clientResponse)))()
 		);
 
-		expect(await aiService.summarizeBranch({ hunks: exampleHunks })).toBe(
-			'with-new-lines-included'
+		expect(await aiService.summarizeBranch({ hunks: exampleHunks })).toStrictEqual(
+			ok('with-new-lines-included')
 		);
 	});
 
@@ -273,11 +285,11 @@ describe.concurrent('AIService', () => {
 		const clientResponse = 'with\nnew lines\nincluded';
 
 		vi.spyOn(aiService, 'buildClient').mockReturnValue(
-			(async () => new DummyAIClient(clientResponse))()
+			(async () => ok(new DummyAIClient(clientResponse)))()
 		);
 
-		expect(await aiService.summarizeBranch({ hunks: exampleHunks })).toBe(
-			'with-new-lines-included'
+		expect(await aiService.summarizeBranch({ hunks: exampleHunks })).toStrictEqual(
+			ok('with-new-lines-included')
 		);
 	});
 });
diff --git a/app/src/lib/ai/service.ts b/app/src/lib/ai/service.ts
index 3e30bb112..30554ba0b 100644
--- a/app/src/lib/ai/service.ts
+++ b/app/src/lib/ai/service.ts
@@ -14,8 +14,8 @@ import {
 	MessageRole,
 	type Prompt
 } from '$lib/ai/types';
+import { buildFailureFromAny, isFailure, ok, type Result } from '$lib/result';
 import { splitMessage } from '$lib/utils/commitMessage';
-import * as toasts from '$lib/utils/toasts';
 import OpenAI from 'openai';
 import type { GitConfigService } from '$lib/backend/gitConfigService';
 import type { HttpClient } from '$lib/backend/httpClient';
@@ -189,21 +189,22 @@ export class AIService {
 	// This optionally returns a summarizer. There are a few conditions for how this may occur
 	// Firstly, if the user has opted to use the GB API and isn't logged in, it will return undefined
 	// Secondly, if the user has opted to bring their own key but hasn't provided one, it will return undefined
-	async buildClient(userToken?: string): Promise<AIClient | undefined> {
+	async buildClient(userToken?: string): Promise<Result<AIClient, Error>> {
 		const modelKind = await this.getModelKind();
 
 		if (await this.usingGitButlerAPI()) {
 			if (!userToken) {
-				toasts.error("When using GitButler's API to summarize code, you must be logged in");
-				return;
+				return buildFailureFromAny(
+					"When using GitButler's API to summarize code, you must be logged in"
+				);
 			}
-			return new ButlerAIClient(this.cloud, userToken, modelKind);
+			return ok(new ButlerAIClient(this.cloud, userToken, modelKind));
 		}
 
 		if (modelKind === ModelKind.Ollama) {
 			const ollamaEndpoint = await this.getOllamaEndpoint();
 			const ollamaModelName = await this.getOllamaModelName();
-			return new OllamaClient(ollamaEndpoint, ollamaModelName);
+			return ok(new OllamaClient(ollamaEndpoint, ollamaModelName));
 		}
 
 		if (modelKind === ModelKind.OpenAI) {
@@ -211,14 +212,13 @@ export class AIService {
 			const openAIKey = await this.getOpenAIKey();
 
 			if (!openAIKey) {
-				toasts.error(
+				return buildFailureFromAny(
 					'When using OpenAI in a bring your own key configuration, you must provide a valid token'
 				);
-				return;
 			}
 
 			const openAI = new OpenAI({ apiKey: openAIKey, dangerouslyAllowBrowser: true });
-			return new OpenAIClient(openAIModelName, openAI);
+			return ok(new OpenAIClient(openAIModelName, openAI));
 		}
 
 		if (modelKind === ModelKind.Anthropic) {
@@ -226,14 +226,15 @@ export class AIService {
 			const anthropicKey = await this.getAnthropicKey();
 
 			if (!anthropicKey) {
-				toasts.error(
+				return buildFailureFromAny(
 					'When using Anthropic in a bring your own key configuration, you must provide a valid token'
 				);
-				return;
 			}
 
-			return new AnthropicAIClient(anthropicKey, anthropicModelName);
+			return ok(new AnthropicAIClient(anthropicKey, anthropicModelName));
 		}
+
+		return buildFailureFromAny('Failed to build ai client');
 	}
 
 	async summarizeCommit({
@@ -242,9 +243,10 @@ export class AIService {
 		useBriefStyle = false,
 		commitTemplate,
 		userToken
-	}: SummarizeCommitOpts) {
-		const aiClient = await this.buildClient(userToken);
-		if (!aiClient) return;
+	}: SummarizeCommitOpts): Promise<Result<string, Error>> {
+		const aiClientResult = await this.buildClient(userToken);
+		if (isFailure(aiClientResult)) return aiClientResult;
+		const aiClient = aiClientResult.value;
 
 		const diffLengthLimit = await this.getDiffLengthLimitConsideringAPI();
 		const defaultedCommitTemplate = commitTemplate || aiClient.defaultCommitTemplate;
@@ -272,19 +274,26 @@ export class AIService {
 			};
 		});
 
-		let message = await aiClient.evaluate(prompt);
+		const messageResult = await aiClient.evaluate(prompt);
+		if (isFailure(messageResult)) return messageResult;
+		let message = messageResult.value;
 
 		if (useBriefStyle) {
 			message = message.split('\n')[0];
 		}
 
 		const { title, description } = splitMessage(message);
-		return description ? `${title}\n\n${description}` : title;
+		return ok(description ? `${title}\n\n${description}` : title);
 	}
 
-	async summarizeBranch({ hunks, branchTemplate, userToken = undefined }: SummarizeBranchOpts) {
-		const aiClient = await this.buildClient(userToken);
-		if (!aiClient) return;
+	async summarizeBranch({
+		hunks,
+		branchTemplate,
+		userToken = undefined
+	}: SummarizeBranchOpts): Promise<Result<string, Error>> {
+		const aiClientResult = await this.buildClient(userToken);
+		if (isFailure(aiClientResult)) return aiClientResult;
+		const aiClient = aiClientResult.value;
 
 		const diffLengthLimit = await this.getDiffLengthLimitConsideringAPI();
 		const defaultedBranchTemplate = branchTemplate || aiClient.defaultBranchTemplate;
@@ -299,7 +308,10 @@ export class AIService {
 			};
 		});
 
-		const message = await aiClient.evaluate(prompt);
-		return message.replaceAll(' ', '-').replaceAll('\n', '-');
+		const messageResult = await aiClient.evaluate(prompt);
+		if (isFailure(messageResult)) return messageResult;
+		const message = messageResult.value;
+
+		return ok(message.replaceAll(' ', '-').replaceAll('\n', '-'));
 	}
 }
diff --git a/app/src/lib/ai/types.ts b/app/src/lib/ai/types.ts
index 4ff924a41..cbcc6da08 100644
--- a/app/src/lib/ai/types.ts
+++ b/app/src/lib/ai/types.ts
@@ -1,4 +1,5 @@
 import type { Persisted } from '$lib/persisted/persisted';
+import type { Result } from '$lib/result';
 
 export enum ModelKind {
 	OpenAI = 'openai',
@@ -33,7 +34,7 @@ export interface PromptMessage {
 export type Prompt = PromptMessage[];
 
 export interface AIClient {
-	evaluate(prompt: Prompt): Promise<string>;
+	evaluate(prompt: Prompt): Promise<Result<string, Error>>;
 
 	defaultBranchTemplate: Prompt;
 	defaultCommitTemplate: Prompt;
diff --git a/app/src/lib/backend/httpClient.ts b/app/src/lib/backend/httpClient.ts
index 95af89270..1d3bea93a 100644
--- a/app/src/lib/backend/httpClient.ts
+++ b/app/src/lib/backend/httpClient.ts
@@ -1,3 +1,4 @@
+import { wrapAsync } from '$lib/result';
 import { PUBLIC_API_BASE_URL } from '$env/static/public';
 
 export const API_URL = new URL('/api/', PUBLIC_API_BASE_URL);
@@ -47,21 +48,41 @@ export class HttpClient {
 	async get<T>(path: string, opts?: Omit<RequestOptions, 'body'>) {
 		return await this.request<T>(path, { ...opts, method: 'GET' });
 	}
 
+	async getSafe<T>(path: string, opts?: Omit<RequestOptions, 'body'>) {
+		return await wrapAsync<T, Error>(async () => await this.get<T>(path, opts));
+	}
+
 	async post<T>(path: string, opts?: RequestOptions) {
 		return await this.request<T>(path, { ...opts, method: 'POST' });
 	}
 
+	async postSafe<T>(path: string, opts?: RequestOptions) {
+		return await wrapAsync<T, Error>(async () => await this.post<T>(path, opts));
+	}
+
 	async put<T>(path: string, opts?: RequestOptions) {
 		return await this.request<T>(path, { ...opts, method: 'PUT' });
 	}
 
+	async putSafe<T>(path: string, opts?: RequestOptions) {
+		return await wrapAsync<T, Error>(async () => await this.put<T>(path, opts));
+	}
+
 	async patch<T>(path: string, opts?: RequestOptions) {
 		return await this.request<T>(path, { ...opts, method: 'PATCH' });
 	}
 
+	async patchSafe<T>(path: string, opts?: RequestOptions) {
+		return await wrapAsync<T, Error>(async () => await this.patch<T>(path, opts));
+	}
+
 	async delete<T>(path: string, opts?: RequestOptions) {
 		return await this.request<T>(path, { ...opts, method: 'DELETE' });
 	}
+
+	async deleteSafe<T>(path: string, opts?: RequestOptions) {
+		return await wrapAsync<T, Error>(async () => await this.delete<T>(path, opts));
+	}
 }
 
 function getApiUrl(path: string) {
diff --git a/app/src/lib/branch/BranchCard.svelte b/app/src/lib/branch/BranchCard.svelte
index 388cba8e5..6e29d1b41 100644
--- a/app/src/lib/branch/BranchCard.svelte
+++ b/app/src/lib/branch/BranchCard.svelte
@@ -17,6 +17,7 @@
 	import BranchFiles from '$lib/file/BranchFiles.svelte';
 	import { showError } from '$lib/notifications/toasts';
 	import { persisted } from '$lib/persisted/persisted';
+	import { isFailure } from '$lib/result';
 	import { SETTINGS, type Settings } from '$lib/settings/userSettings';
 	import Resizer from '$lib/shared/Resizer.svelte';
 	import { User } from '$lib/stores/user';
@@ -64,21 +65,25 @@
 
 		const hunks = branch.files.flatMap((f) => f.hunks);
 
-		try {
-			const prompt = promptService.selectedBranchPrompt(project.id);
-			const message = await aiService.summarizeBranch({
-				hunks,
-				userToken: $user?.access_token,
-				branchTemplate: prompt
-			});
+		const prompt = promptService.selectedBranchPrompt(project.id);
+		const messageResult = await aiService.summarizeBranch({
+			hunks,
+			userToken: $user?.access_token,
+			branchTemplate: prompt
+		});
 
-			if (message && message !== branch.name) {
-				branch.name = message;
-				branchController.updateBranchName(branch.id, branch.name);
-			}
-		} catch (e) {
-			console.error(e);
-			showError('Failed to generate branch name', e);
+		if (isFailure(messageResult)) {
+			console.error(messageResult.failure);
+			showError('Failed to generate branch name', messageResult.failure);
+
+			return;
+		}
+
+		const message = messageResult.value;
+
+		if (message && message !== branch.name) {
+			branch.name = message;
+			branchController.updateBranchName(branch.id, branch.name);
 		}
 	}
diff --git a/app/src/lib/commit/CommitMessageInput.svelte b/app/src/lib/commit/CommitMessageInput.svelte
index d9473a696..6fb4f932d 100644
--- a/app/src/lib/commit/CommitMessageInput.svelte
+++ b/app/src/lib/commit/CommitMessageInput.svelte
@@ -11,6 +11,7 @@
 		projectCommitGenerationUseEmojis
 	} from '$lib/config/config';
 	import { showError } from '$lib/notifications/toasts';
+	import { isFailure } from '$lib/result';
 	import Checkbox from '$lib/shared/Checkbox.svelte';
 	import DropDownButton from '$lib/shared/DropDownButton.svelte';
 	import Icon from '$lib/shared/Icon.svelte';
@@ -75,27 +76,35 @@
 		}
 
 		aiLoading = true;
-		try {
-			const prompt = promptService.selectedCommitPrompt(project.id);
-			console.log(prompt);
-			const generatedMessage = await aiService.summarizeCommit({
-				hunks,
-				useEmojiStyle: $commitGenerationUseEmojis,
-				useBriefStyle: $commitGenerationExtraConcise,
-				userToken: $user?.access_token,
-				commitTemplate: prompt
-			});
-			if (generatedMessage) {
-				commitMessage = generatedMessage;
-			} else {
-				throw new Error('Prompt generated no response');
-			}
-		} catch (e: any) {
-			showError('Failed to generate commit message', e);
-		} finally {
+		const prompt = promptService.selectedCommitPrompt(project.id);
+
+		const generatedMessageResult = await aiService.summarizeCommit({
+			hunks,
+			useEmojiStyle: $commitGenerationUseEmojis,
+			useBriefStyle: $commitGenerationExtraConcise,
+			userToken: $user?.access_token,
+			commitTemplate: prompt
+		});
+
+		if (isFailure(generatedMessageResult)) {
+			showError('Failed to generate commit message', generatedMessageResult.failure);
 			aiLoading = false;
+			return;
 		}
+
+		const generatedMessage = generatedMessageResult.value;
+
+		if (generatedMessage) {
+			commitMessage = generatedMessage;
+		} else {
+			const errorMessage = 'Prompt generated no response';
+			showError(errorMessage, undefined);
+			aiLoading = false;
+			return;
+		}
+
+		aiLoading = false;
 	}
 
 	onMount(async () => {
diff --git a/app/src/lib/result.ts b/app/src/lib/result.ts
new file mode 100644
index 000000000..08d1d2725
--- /dev/null
+++ b/app/src/lib/result.ts
@@ -0,0 +1,99 @@
+export class Panic extends Error {}
+
+export type OkVariant<Ok> = {
+	ok: true;
+	value: Ok;
+};
+
+export type FailureVariant<Err> = {
+	ok: false;
+	failure: Err;
+};
+
+export type Result<Ok, Err> = OkVariant<Ok> | FailureVariant<Err>;
+
+export function isOk<Ok, Err>(
+	subject: OkVariant<Ok> | FailureVariant<Err>
+): subject is OkVariant<Ok> {
+	return subject.ok;
+}
+
+export function isFailure<Ok, Err>(
+	subject: OkVariant<Ok> | FailureVariant<Err>
+): subject is FailureVariant<Err> {
+	return !subject.ok;
+}
+
+export function ok<Ok, Err>(value: Ok): Result<Ok, Err> {
+	return { ok: true, value };
+}
+
+export function failure<Ok, Err>(value: Err): Result<Ok, Err> {
+	return { ok: false, failure: value };
+}
+
+export function buildFailureFromAny<Ok>(value: any): Result<Ok, Error> {
+	if (value instanceof Error) {
+		return failure(value);
+	} else {
+		return failure(new Error(String(value)));
+	}
+}
+
+export function wrap<Ok, Err>(subject: () => Ok): Result<Ok, Err> {
+	try {
+		return ok(subject());
+	} catch (e) {
+		return failure(e as Err);
+	}
+}
+
+export async function wrapAsync<Ok, Err>(subject: () => Promise<Ok>): Promise<Result<Ok, Err>> {
+	try {
+		return ok(await subject());
+	} catch (e) {
+		return failure(e as Err);
+	}
+}
+
+export function unwrap<Ok, Err>(subject: Result<Ok, Err>): Ok {
+	if (isOk(subject)) {
+		return subject.value;
+	} else {
+		if (subject.failure instanceof Error) {
+			throw subject.failure;
+		} else {
+			throw new Panic(String(subject.failure));
+		}
+	}
+}
+
+export function unwrapOr<Ok, Err, Or>(subject: Result<Ok, Err>, or: Or): Ok | Or {
+	if (isOk(subject)) {
+		return subject.value;
+	} else {
+		return or;
+	}
+}
+
+export function map<Ok, NewOk, Err>(
+	subject: Result<Ok, Err>,
+	transformation: (ok: Ok) => NewOk
+): Result<NewOk, Err> {
+	if (isOk(subject)) {
+		return ok(transformation(subject.value));
+	} else {
+		return subject;
+	}
+}
+
+export function andThen<Ok, NewOk, Err>(
+	subject: Result<Ok, Err>,
+	transformation: (ok: Ok) => Result<NewOk, Err>
+): Result<NewOk, Err> {
+	if (isOk(subject)) {
+		return transformation(subject.value);
+	} else {
+		return subject;
+	}
+}
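
For context on how the new helpers compose (a minimal sketch against the app/src/lib/result.ts API added above, not part of the patch): wrap() and wrapAsync() capture thrown exceptions as failure variants, andThen() chains fallible steps, map() transforms success values, and isFailure()/unwrap()/unwrapOr() consume results at the call site. The parseVersion helper and its JSON shape below are hypothetical, invented for illustration only.

	import { andThen, buildFailureFromAny, map, ok, unwrapOr, wrap, type Result } from '$lib/result';

	type VersionInfo = { version: string };

	// Hypothetical helper: parse a JSON payload without letting JSON.parse throw,
	// mirroring the pattern OllamaClient.evaluate uses in this patch.
	function parseVersion(raw: string): Result<VersionInfo, Error> {
		// wrap() turns a thrown JSON.parse error into a failure variant
		const parsed = wrap<unknown, Error>(() => JSON.parse(raw));

		// andThen() chains a further fallible step onto a success value
		return andThen(parsed, (value) => {
			if (value && typeof value === 'object' && 'version' in value) {
				return ok(value as VersionInfo);
			}
			return buildFailureFromAny('Invalid response: ' + raw);
		});
	}

	// map() transforms the success value; failures pass through untouched
	const result = map(parseVersion('{"version":"1.2.3"}'), ({ version }) => version);

	// unwrapOr() reads the value with a fallback instead of throwing
	console.log(unwrapOr(result, 'unknown')); // "1.2.3"

The same shape shows up throughout the diff: buildClient() and evaluate() return a Result instead of throwing or returning undefined, and the Svelte components branch on isFailure() before using the value.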