Merge branch 'ndom91/add-playwright-test' of github.com:ndom91/gitbutler into ndom91/add-playwright-test

This commit is contained in:
ndom91 2024-05-05 18:51:21 +02:00
commit dd79d9ec5a
No known key found for this signature in database
152 changed files with 4947 additions and 2584 deletions

View File

@ -15,6 +15,10 @@ runs:
steps:
- uses: ./.github/actions/init-env-rust
- run: |
cargo build --locked -p gitbutler-git --bins
shell: bash
- run: |
printf '%s\n' "$JSON_DOC" > /tmp/features.json
cat /tmp/features.json | jq -r 'if . == "*" then "--all-features" elif . == "" then "" elif type == "array" then if length == 0 then "--no-default-features" else "--no-default-features --features " + join(",") end else . end' > /tmp/features

View File

@ -225,4 +225,4 @@ jobs:
- uses: actions/checkout@v3
- uses: Swatinem/rust-cache@v2
- name: "cargo check"
run: cargo check --all --bins --examples
run: cargo check --all --bins --examples --features windows

226
Cargo.lock generated
View File

@ -101,9 +101,6 @@ name = "anyhow"
version = "1.0.81"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247"
dependencies = [
"backtrace",
]
[[package]]
name = "arc-swap"
@ -1123,16 +1120,6 @@ dependencies = [
"parking_lot_core 0.9.9",
]
[[package]]
name = "debugid"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d"
dependencies = [
"serde",
"uuid",
]
[[package]]
name = "der"
version = "0.7.9"
@ -1598,18 +1585,6 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "findshlibs"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40b9e59cd0f7e0806cca4be089683ecb6434e602038df21fe6bf6711b2f07f64"
dependencies = [
"cc",
"lazy_static",
"libc",
"winapi 0.3.9",
]
[[package]]
name = "flate2"
version = "1.0.28"
@ -2031,21 +2006,6 @@ dependencies = [
"thiserror",
]
[[package]]
name = "gitbutler-analytics"
version = "0.0.0"
dependencies = [
"async-trait",
"chrono",
"gitbutler-core",
"reqwest 0.12.2",
"serde",
"serde_json",
"thiserror",
"tokio",
"tracing",
]
[[package]]
name = "gitbutler-changeset"
version = "0.0.0"
@ -2094,6 +2054,7 @@ dependencies = [
"slug",
"ssh-key",
"ssh2",
"strum",
"tempfile",
"thiserror",
"tokio",
@ -2131,7 +2092,6 @@ dependencies = [
"console-subscriber",
"futures",
"git2",
"gitbutler-analytics",
"gitbutler-core",
"gitbutler-testsupport",
"gitbutler-watcher",
@ -2141,8 +2101,6 @@ dependencies = [
"once_cell",
"pretty_assertions",
"reqwest 0.12.2",
"sentry",
"sentry-tracing",
"serde",
"serde_json",
"slug",
@ -2182,7 +2140,6 @@ dependencies = [
"crossbeam-channel",
"futures",
"git2",
"gitbutler-analytics",
"gitbutler-core",
"gitbutler-testsupport",
"itertools 0.12.1",
@ -3147,17 +3104,6 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "hostname"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867"
dependencies = [
"libc",
"match_cfg",
"winapi 0.3.9",
]
[[package]]
name = "html5ever"
version = "0.26.0"
@ -3860,12 +3806,6 @@ dependencies = [
"tendril",
]
[[package]]
name = "match_cfg"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4"
[[package]]
name = "matchers"
version = "0.1.0"
@ -5648,126 +5588,6 @@ dependencies = [
"serde",
]
[[package]]
name = "sentry"
version = "0.32.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "766448f12e44d68e675d5789a261515c46ac6ccd240abdd451a9c46c84a49523"
dependencies = [
"httpdate",
"native-tls",
"reqwest 0.11.27",
"sentry-anyhow",
"sentry-backtrace",
"sentry-contexts",
"sentry-core",
"sentry-debug-images",
"sentry-panic",
"sentry-tracing",
"tokio",
"ureq",
]
[[package]]
name = "sentry-anyhow"
version = "0.32.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4da4015667c99f88d68ca7ff02b90c762d6154a4ceb7c02922b9a1dbd3959eeb"
dependencies = [
"anyhow",
"sentry-backtrace",
"sentry-core",
]
[[package]]
name = "sentry-backtrace"
version = "0.32.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32701cad8b3c78101e1cd33039303154791b0ff22e7802ed8cc23212ef478b45"
dependencies = [
"backtrace",
"once_cell",
"regex",
"sentry-core",
]
[[package]]
name = "sentry-contexts"
version = "0.32.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17ddd2a91a13805bd8dab4ebf47323426f758c35f7bf24eacc1aded9668f3824"
dependencies = [
"hostname",
"libc",
"os_info",
"rustc_version",
"sentry-core",
"uname",
]
[[package]]
name = "sentry-core"
version = "0.32.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1189f68d7e7e102ef7171adf75f83a59607fafd1a5eecc9dc06c026ff3bdec4"
dependencies = [
"once_cell",
"rand 0.8.5",
"sentry-types",
"serde",
"serde_json",
]
[[package]]
name = "sentry-debug-images"
version = "0.32.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b4d0a615e5eeca5699030620c119a094e04c14cf6b486ea1030460a544111a7"
dependencies = [
"findshlibs",
"once_cell",
"sentry-core",
]
[[package]]
name = "sentry-panic"
version = "0.32.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d1c18d0b5fba195a4950f2f4c31023725c76f00aabb5840b7950479ece21b5ca"
dependencies = [
"sentry-backtrace",
"sentry-core",
]
[[package]]
name = "sentry-tracing"
version = "0.32.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3012699a9957d7f97047fd75d116e22d120668327db6e7c59824582e16e791b2"
dependencies = [
"sentry-backtrace",
"sentry-core",
"tracing-core",
"tracing-subscriber",
]
[[package]]
name = "sentry-types"
version = "0.32.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7173fd594569091f68a7c37a886e202f4d0c1db1e1fa1d18a051ba695b2e2ec"
dependencies = [
"debugid",
"hex",
"rand 0.8.5",
"serde",
"serde_json",
"thiserror",
"time",
"url",
"uuid",
]
[[package]]
name = "serde"
version = "1.0.197"
@ -6213,6 +6033,28 @@ version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "strum"
version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d8cec3501a5194c432b2b7976db6b7d10ec95c253208b45f83f7136aa985e29"
dependencies = [
"strum_macros",
]
[[package]]
name = "strum_macros"
version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6cf59daf282c0a494ba14fd21610a0325f9f90ec9d1231dea26bcb1d696c946"
dependencies = [
"heck 0.4.1",
"proc-macro2",
"quote",
"rustversion",
"syn 2.0.58",
]
[[package]]
name = "subtle"
version = "2.5.0"
@ -7122,15 +6964,6 @@ dependencies = [
"winapi 0.3.9",
]
[[package]]
name = "uname"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b72f89f0ca32e4db1c04e2a72f5345d59796d4866a1ee0609084569f73683dc8"
dependencies = [
"libc",
]
[[package]]
name = "unicode-bidi"
version = "0.3.15"
@ -7164,19 +6997,6 @@ version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202"
[[package]]
name = "ureq"
version = "2.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11f214ce18d8b2cbe84ed3aa6486ed3f5b285cf8d8fbdbce9f3f767a724adc35"
dependencies = [
"base64 0.21.7",
"log",
"native-tls",
"once_cell",
"url",
]
[[package]]
name = "url"
version = "2.5.0"

View File

@ -1,6 +1,5 @@
[workspace]
members = [
"crates/gitbutler-analytics",
"crates/gitbutler-core",
"crates/gitbutler-tauri",
"crates/gitbutler-changeset",
@ -21,7 +20,6 @@ tokio = { version = "1.37.0", default-features = false }
gitbutler-git = { path = "crates/gitbutler-git" }
gitbutler-core = { path = "crates/gitbutler-core" }
gitbutler-analytics = { path = "crates/gitbutler-analytics" }
gitbutler-watcher = { path = "crates/gitbutler-watcher" }
gitbutler-testsupport = { path = "crates/gitbutler-testsupport" }
@ -29,4 +27,4 @@ gitbutler-testsupport = { path = "crates/gitbutler-testsupport" }
codegen-units = 1 # Compile crates one after another so the compiler can optimize better
lto = true       # Enables link-time optimizations
opt-level = "s" # Optimize for binary size
debug = true # Enable debug symbols, for sentry
debug = true # Enable debug symbols, for profiling

View File

@ -44,18 +44,17 @@
"@octokit/rest": "^20.1.0",
"@playwright/test": "^1.40.0",
"@replit/codemirror-lang-svelte": "^6.0.0",
"@sentry/sveltekit": "^7.111.0",
"@sentry/sveltekit": "^7.112.2",
"@sveltejs/adapter-static": "^2.0.3",
"@sveltejs/kit": "^1.30.4",
"@tauri-apps/api": "^1.5.3",
"@tauri-apps/api": "^1.5.4",
"@types/crypto-js": "^4.2.2",
"@types/diff": "^5.2.0",
"@types/diff-match-patch": "^1.0.36",
"@types/lscache": "^1.3.4",
"@types/marked": "^5.0.2",
"@types/node": "^20.12.8",
"@typescript-eslint/eslint-plugin": "^7.7.0",
"@typescript-eslint/parser": "^7.7.0",
"@typescript-eslint/eslint-plugin": "^7.7.1",
"@typescript-eslint/parser": "^7.7.1",
"autoprefixer": "^10.4.19",
"class-transformer": "^0.5.1",
"crypto-js": "^4.2.0",
@ -66,7 +65,7 @@
"eslint-import-resolver-typescript": "^3.6.1",
"eslint-plugin-import": "^2.29.1",
"eslint-plugin-square-svelte-store": "^1.0.0",
"eslint-plugin-svelte": "^2.37.0",
"eslint-plugin-svelte": "^2.38.0",
"inter-ui": "^4.0.2",
"leven": "^4.0.0",
"lscache": "^1.3.2",
@ -76,14 +75,14 @@
"nanoid": "^5.0.7",
"postcss": "^8.4.38",
"postcss-load-config": "^5.0.3",
"posthog-js": "1.128.2",
"posthog-js": "1.130.1",
"prettier": "^3.2.5",
"prettier-plugin-svelte": "^3.2.3",
"prettier-plugin-tailwindcss": "^0.5.14",
"reflect-metadata": "^0.2.2",
"rxjs": "^7.8.1",
"svelte": "^4.2.15",
"svelte-check": "^3.6.9",
"svelte-check": "^3.7.0",
"svelte-floating-ui": "^1.5.8",
"svelte-french-toast": "^1.2.0",
"svelte-loadable-store": "^2.0.1",
@ -101,7 +100,7 @@
"vitest": "^0.34.6"
},
"dependencies": {
"openai": "^4.38.2"
"openai": "^4.38.5"
},
"packageManager": "pnpm@8.15.5"
}

View File

@ -0,0 +1,34 @@
import { initPostHog } from '$lib/analytics/posthog';
import { initSentry } from '$lib/analytics/sentry';
import { appAnalyticsConfirmed } from '$lib/config/appSettings';
import {
appMetricsEnabled,
appErrorReportingEnabled,
appNonAnonMetricsEnabled
} from '$lib/config/appSettings';
import posthog from 'posthog-js';
export function initAnalyticsIfEnabled() {
const analyticsConfirmed = appAnalyticsConfirmed();
analyticsConfirmed.onDisk().then((confirmed) => {
if (confirmed) {
appErrorReportingEnabled()
.onDisk()
.then((enabled) => {
if (enabled) initSentry();
});
appMetricsEnabled()
.onDisk()
.then((enabled) => {
if (enabled) initPostHog();
});
appNonAnonMetricsEnabled()
.onDisk()
.then((enabled) => {
enabled
? posthog.capture('nonAnonMetricsEnabled')
: posthog.capture('nonAnonMetricsDisabled');
});
}
});
}

View File

@ -7,7 +7,7 @@ export async function initPostHog() {
const [appName, appVersion] = await Promise.all([getName(), getVersion()]);
posthog.init(PUBLIC_POSTHOG_API_KEY, {
api_host: 'https://eu.posthog.com',
disable_session_recording: appName !== 'GitButler', // only record sessions in production
disable_session_recording: true,
capture_performance: false,
request_batching: true,
persistence: 'localStorage',

View File

@ -1,6 +1,7 @@
<script lang="ts">
import AnalyticsSettings from './AnalyticsSettings.svelte';
import Button from './Button.svelte';
import { initAnalyticsIfEnabled } from '$lib/analytics/analytics';
import type { Writable } from 'svelte/store';
export let analyticsConfirmed: Writable<boolean>;
@ -17,6 +18,7 @@
icon="chevron-right-small"
on:click={() => {
$analyticsConfirmed = true;
initAnalyticsIfEnabled();
}}
>
Continue

View File

@ -1,22 +1,27 @@
<script lang="ts">
import InfoMessage from './InfoMessage.svelte';
import Link from './Link.svelte';
import SectionCard from './SectionCard.svelte';
import Toggle from './Toggle.svelte';
import { appErrorReportingEnabled, appMetricsEnabled } from '$lib/config/appSettings';
import {
appErrorReportingEnabled,
appMetricsEnabled,
appNonAnonMetricsEnabled
} from '$lib/config/appSettings';
const errorReportingEnabled = appErrorReportingEnabled();
const metricsEnabled = appMetricsEnabled();
let updatedTelemetrySettings = false;
const nonAnonMetricsEnabled = appNonAnonMetricsEnabled();
function toggleErrorReporting() {
$errorReportingEnabled = !$errorReportingEnabled;
updatedTelemetrySettings = true;
}
function toggleMetrics() {
$metricsEnabled = !$metricsEnabled;
updatedTelemetrySettings = true;
}
function toggleNonAnonMetrics() {
$nonAnonMetricsEnabled = !$nonAnonMetricsEnabled;
}
</script>
@ -24,7 +29,13 @@
<div class="analytics-settings__content">
<p class="text-base-body-13 analytics-settings__text">
GitButler uses telemetry strictly to help us improve the client. We do not collect any
personal information.
personal information (<Link
target="_blank"
rel="noreferrer"
href="https://gitbutler.com/privacy"
>
privacy policy
</Link>).
</p>
<p class="text-base-body-13 analytics-settings__text">
We kindly ask you to consider keeping these settings enabled as it helps us catch issues more
@ -61,13 +72,19 @@
</svelte:fragment>
</SectionCard>
{#if updatedTelemetrySettings}
<InfoMessage>
<svelte:fragment slot="content"
>Changes will take effect on the next application start.</svelte:fragment
>
</InfoMessage>
{/if}
<SectionCard labelFor="metricsEnabledToggle" on:click={toggleMetrics} orientation="row">
<svelte:fragment slot="title">Non-anonymous usage metrics</svelte:fragment>
<svelte:fragment slot="caption"
>Toggle sharing of identifiable usage statistics.</svelte:fragment
>
<svelte:fragment slot="actions">
<Toggle
id="nonAnonMetricsEnabledToggle"
checked={$nonAnonMetricsEnabled}
on:change={toggleNonAnonMetrics}
/>
</svelte:fragment>
</SectionCard>
</div>
</section>

View File

@ -5,7 +5,7 @@
export let help = '';
</script>
<div class="badge text-base-10 text-semibold" use:tooltip={help}>
<div class="badge text-base-9 text-semibold" use:tooltip={help}>
{count}
</div>
@ -15,12 +15,12 @@
align-items: center;
justify-content: center;
text-align: center;
height: var(--size-16);
min-width: var(--size-16);
border-radius: var(--size-16);
height: var(--size-14);
min-width: var(--size-14);
border-radius: var(--size-14);
padding: 0 var(--size-4);
color: var(--clr-scale-ntrl-100);
background-color: var(--clr-scale-ntrl-50);
background-color: var(--clr-scale-ntrl-40);
line-height: 90%;
}
</style>

View File

@ -102,7 +102,7 @@
}
.row_1 {
display: flex;
gap: var(--size-6);
gap: var(--size-4);
align-items: center;
color: var(--clr-scale-ntrl-10);
}

View File

@ -1,12 +1,16 @@
<script lang="ts" async="true">
import Button from './Button.svelte';
import FullviewLoading from './FullviewLoading.svelte';
import NewBranchDropZone from './NewBranchDropZone.svelte';
import dzenSvg from '$lib/assets/dzen-pc.svg?raw';
import { Project } from '$lib/backend/projects';
import BranchLane from '$lib/components/BranchLane.svelte';
import Icon from '$lib/components/Icon.svelte';
import Select from '$lib/components/Select.svelte';
import SelectItem from '$lib/components/SelectItem.svelte';
import { cloneWithRotation } from '$lib/dragging/draggable';
import { getContext, getContextStore } from '$lib/utils/context';
import { getRemoteBranches } from '$lib/vbranches/baseBranch';
import { BranchController } from '$lib/vbranches/branchController';
import { BaseBranch } from '$lib/vbranches/types';
import { VirtualBranchService } from '$lib/vbranches/virtualBranch';
@ -16,10 +20,15 @@
const branchController = getContext(BranchController);
const baseBranch = getContextStore(BaseBranch);
const project = getContext(Project);
const activeBranchesError = vbranchService.activeBranchesError;
const activeBranches = vbranchService.activeBranches;
let selectedBranch:
| {
name: string;
}
| undefined;
let dragged: any;
let dropZone: HTMLDivElement;
let priorPosition = 0;
@ -27,12 +36,39 @@
let dragHandle: any;
let clone: any;
let showBranchSwitch = false;
let isSwitching = false;
function toggleBranchSwitch() {
showBranchSwitch = !showBranchSwitch;
}
async function onSetBaseBranchClick() {
if (!selectedBranch) return;
// while target is setting, display loading
isSwitching = true;
await branchController
.setTarget(selectedBranch.name)
.then((res) => {
console.log('done', res);
})
.catch((err) => {
console.log('error', err);
})
.finally(() => {
isSwitching = false;
showBranchSwitch = false;
});
}
</script>
{#if $activeBranchesError}
<div class="p-4" data-tauri-drag-region>Something went wrong...</div>
{:else if !$activeBranches}
<FullviewLoading />
{:else if isSwitching}
<div class="middle-message">switching base branch...</div>
{:else}
<div
class="board"
@ -122,6 +158,60 @@
<br />
Any edits auto-create a virtual branch for easy management.
</p>
<div class="branch-switcher">
{#if showBranchSwitch}
{#await getRemoteBranches(project.id)}
loading remote branches...
{:then remoteBranches}
{#if remoteBranches.length == 0}
No remote branches
{:else}
<div class="spacer">
<Select
items={remoteBranches.filter(
(item) => item.name != $baseBranch.branchName
)}
bind:value={selectedBranch}
itemId="name"
labelId="name"
>
<SelectItem slot="template" let:item let:selected {selected}>
{item.name}
</SelectItem>
</Select>
<Button
style="error"
kind="solid"
on:click={onSetBaseBranchClick}
icon="chevron-right-small"
id="set-base-branch"
>
Update Base Branch
</Button>
</div>
{/if}
{:catch}
No remote branches
{/await}
{:else}
<div>
<div class="branch-display">
<div>Your current base branch is:</div>
<div class="branch-name">{$baseBranch.branchName}</div>
</div>
<Button
style="pop"
kind="solid"
on:click={toggleBranchSwitch}
icon="chevron-right-small"
id="set-base-branch"
>
Change Base Branch
</Button>
</div>
{/if}
</div>
</div>
<div class="empty-board__suggestions">
@ -202,6 +292,12 @@
height: 100%;
}
.spacer {
display: flex;
flex-direction: column;
gap: var(--size-16);
}
.branch {
height: 100%;
}
@ -255,6 +351,38 @@
padding-left: var(--size-4);
}
.branch-switcher {
margin-top: 8px;
padding: 8px;
background-color: #f5f5f5;
border-width: 1px;
border-color: #888888;
border-radius: 4px;
}
.branch-display {
display: flex;
flex-direction: row;
align-items: center;
gap: 4px;
margin-bottom: 2px;
}
.branch-name {
font-weight: 600;
font-family: monospace;
}
.middle-message {
display: flex;
justify-content: center;
align-items: center;
height: 100%;
width: 100%;
font-size: 2em;
color: #888888;
}
.empty-board__image-frame {
flex-shrink: 0;
position: relative;

View File

@ -41,7 +41,7 @@
flex: 1;
display: flex;
flex-direction: column;
background: var(--clr-theme-container-light);
background: var(--clr-bg-1);
border-radius: var(--radius-m) var(--radius-m) 0 0;
padding: 0 var(--size-14) var(--size-14);
}

View File

@ -61,11 +61,9 @@
border: 1px solid transparent;
}
.branch-name-mesure-el {
pointer-events: auto;
pointer-events: none;
visibility: hidden;
border: 2px solid transparent;
top: -9999px;
left: -9999px;
color: black;
position: fixed;
display: inline-block;

View File

@ -102,39 +102,7 @@
&:disabled {
cursor: default;
pointer-events: none;
opacity: 0.7;
&.neutral.solid,
&.pop.solid,
&.success.solid,
&.error.solid,
&.warning.solid,
&.purple.solid {
--btn-clr: var(--clr-text-2);
--btn-bg: var(--clr-bg-3);
& .badge {
--btn-bg: var(--clr-scale-ntrl-100);
}
}
&.neutral.soft,
&.pop.soft,
&.success.soft,
&.error.soft,
&.warning.soft,
&.purple.soft {
--btn-clr: var(--clr-text-2);
--btn-bg: var(--clr-bg-3);
}
&.ghost {
--btn-clr: var(--clr-text-2);
}
&.ghost.solid {
border-color: var(--clr-bg-3);
}
opacity: 0.5;
}
&.wide {
display: flex;
@ -238,7 +206,7 @@
.pop {
&.soft {
--btn-clr: var(--clr-theme-pop-on-container);
--btn-clr: var(--clr-theme-pop-on-soft);
--btn-bg: var(--clr-scale-pop-80);
/* if button */
&:not(.not-clickable, &:disabled):hover {
@ -261,7 +229,7 @@
.success {
&.soft {
--btn-clr: var(--clr-theme-succ-on-container);
--btn-clr: var(--clr-theme-succ-on-soft);
--btn-bg: var(--clr-scale-succ-80);
/* if button */
&:not(.not-clickable, &:disabled):hover {
@ -284,7 +252,7 @@
.error {
&.soft {
--btn-clr: var(--clr-theme-err-on-container);
--btn-clr: var(--clr-theme-err-on-soft);
--btn-bg: var(--clr-scale-err-80);
/* if button */
&:not(.not-clickable, &:disabled):hover {
@ -307,7 +275,7 @@
.warning {
&.soft {
--btn-clr: var(--clr-theme-warn-on-container);
--btn-clr: var(--clr-theme-warn-on-soft);
--btn-bg: var(--clr-scale-warn-80);
/* if button */
&:not(.not-clickable, &:disabled):hover {
@ -330,7 +298,7 @@
.purple {
&.soft {
--btn-clr: var(--clr-theme-purp-on-container);
--btn-clr: var(--clr-theme-purp-on-soft);
--btn-bg: var(--clr-scale-purp-80);
/* if button */
&:not(.not-clickable, &:disabled):hover {

View File

@ -47,6 +47,11 @@
function toggleFiles() {
showFiles = !showFiles;
if (!showFiles && branch) {
if (commit.description != description) {
branchController.updateCommitMessage(branch.id, commit.id, description);
}
}
if (showFiles) loadFiles();
}
@ -56,20 +61,33 @@
}
}
function resetHeadCommit() {
function undoCommit(commit: Commit | RemoteCommit) {
if (!branch || !$baseBranch) {
console.error('Unable to reset head commit');
console.error('Unable to undo commit');
return;
}
if (branch.commits.length > 1) {
branchController.resetBranch(branch.id, branch.commits[1].id);
} else if (branch.commits.length === 1 && $baseBranch) {
branchController.resetBranch(branch.id, $baseBranch.baseSha);
}
branchController.undoCommit(branch.id, commit.id);
}
const isUndoable = isHeadCommit && !isUnapplied;
function insertBlankCommit(commit: Commit | RemoteCommit, offset: number) {
if (!branch || !$baseBranch) {
console.error('Unable to insert commit');
return;
}
branchController.insertBlankCommit(branch.id, commit.id, offset);
}
function reorderCommit(commit: Commit | RemoteCommit, offset: number) {
if (!branch || !$baseBranch) {
console.error('Unable to move commit');
return;
}
branchController.reorderCommit(branch.id, commit.id, offset);
}
const isUndoable = !isUnapplied;
const hasCommitUrl = !commit.isLocal && commitUrl;
let description = commit.description;
</script>
<div
@ -81,33 +99,55 @@
class="commit"
class:is-commit-open={showFiles}
>
{#if isUndoable && showFiles}
<div class="commit__edit_description">
<textarea
placeholder="commit message here"
bind:value={description}
rows={commit.description.split('\n').length + 1}
></textarea>
</div>
{/if}
<div class="commit__header" on:click={toggleFiles} on:keyup={onKeyup} role="button" tabindex="0">
<div class="commit__message">
<div class="commit__row">
<span class="commit__title text-semibold text-base-12" class:truncate={!showFiles}>
{commit.descriptionTitle}
</span>
{#if isUndoable && !showFiles}
<Tag
style="ghost"
kind="solid"
icon="undo-small"
clickable
on:click={(e) => {
currentCommitMessage.set(commit.description);
e.stopPropagation();
resetHeadCommit();
}}>Undo</Tag
>
{/if}
</div>
{#if showFiles && commit.descriptionBody}
<div class="commit__row" transition:slide={{ duration: 100 }}>
<span class="commit__body text-base-body-12">
{commit.descriptionBody}
</span>
{#if !showFiles}
<div class="commit__id">
<code>{commit.id.substring(0, 6)}</code>
</div>
{/if}
<div class="commit__row">
{#if isUndoable}
{#if !showFiles}
{#if commit.descriptionTitle}
<span class="commit__title text-semibold text-base-12" class:truncate={!showFiles}>
{commit.descriptionTitle}
</span>
{:else}
<span
class="commit__title_no_desc text-base-12 text-zinc-400"
class:truncate={!showFiles}
>
<i>empty commit message</i>
</span>
{/if}
<Tag
style="ghost"
kind="solid"
icon="undo-small"
clickable
on:click={(e) => {
currentCommitMessage.set(commit.description);
e.stopPropagation();
undoCommit(commit);
}}>Undo</Tag
>
{/if}
{:else}
<span class="commit__title text-base-12" class:truncate={!showFiles}>
{commit.descriptionTitle}
</span>
{/if}
</div>
</div>
<div class="commit__row">
<div class="commit__author">
@ -130,12 +170,48 @@
{#if showFiles}
<div class="files-container" transition:slide={{ duration: 100 }}>
<BranchFilesList {files} {isUnapplied} readonly />
<BranchFilesList {files} {isUnapplied} />
</div>
{#if hasCommitUrl || isUndoable}
<div class="files__footer">
{#if isUndoable}
<Tag
style="ghost"
kind="solid"
clickable
on:click={(e) => {
e.stopPropagation();
reorderCommit(commit, -1);
}}>Move Up</Tag
>
<Tag
style="ghost"
kind="solid"
clickable
on:click={(e) => {
e.stopPropagation();
reorderCommit(commit, 1);
}}>Move Down</Tag
>
<Tag
style="ghost"
kind="solid"
clickable
on:click={(e) => {
e.stopPropagation();
insertBlankCommit(commit, -1);
}}>Add Before</Tag
>
<Tag
style="ghost"
kind="solid"
clickable
on:click={(e) => {
e.stopPropagation();
insertBlankCommit(commit, 1);
}}>Add After</Tag
>
<Tag
style="ghost"
kind="solid"
@ -144,7 +220,7 @@
on:click={(e) => {
currentCommitMessage.set(commit.description);
e.stopPropagation();
resetHeadCommit();
undoCommit(commit);
}}>Undo</Tag
>
{/if}
@ -221,6 +297,11 @@
color: var(--clr-scale-ntrl-0);
width: 100%;
}
.commit__title_no_desc {
flex: 1;
display: block;
width: 100%;
}
.commit__body {
flex: 1;
@ -237,6 +318,31 @@
gap: var(--size-8);
}
.commit__edit_description {
width: 100%;
}
.commit__edit_description textarea {
width: 100%;
padding: 10px 14px;
margin: 0;
border-bottom: 1px solid #dddddd;
}
.commit__id {
display: flex;
align-items: center;
justify-content: center;
margin-top: -14px;
}
.commit__id > code {
background-color: #eeeeee;
padding: 1px 12px;
color: #888888;
font-size: x-small;
border-radius: 0px 0px 6px 6px;
margin-bottom: -8px;
}
.commit__author {
display: block;
flex: 1;

View File

@ -6,8 +6,15 @@
import { dropzone } from '$lib/dragging/dropzone';
import { getContext, getContextStore } from '$lib/utils/context';
import { BranchController } from '$lib/vbranches/branchController';
import { filesToOwnership } from '$lib/vbranches/ownership';
import { RemoteCommit, Branch, type Commit, BaseBranch } from '$lib/vbranches/types';
import { filesToOwnership, filesToSimpleOwnership } from '$lib/vbranches/ownership';
import {
RemoteCommit,
Branch,
type Commit,
BaseBranch,
LocalFile,
RemoteFile
} from '$lib/vbranches/types';
export let commit: Commit | RemoteCommit;
export let isHeadCommit: boolean;
@ -32,11 +39,6 @@
return false;
}
// only allow to amend the head commit
if (commit.id != $branch.commits.at(0)?.id) {
return false;
}
if (data instanceof DraggableHunk && data.branchId == $branch.id) {
return true;
} else if (data instanceof DraggableFile && data.branchId == $branch.id) {
@ -47,14 +49,25 @@
};
}
function onAmend(data: DraggableFile | DraggableHunk) {
if (data instanceof DraggableHunk) {
const newOwnership = `${data.hunk.filePath}:${data.hunk.id}`;
branchController.amendBranch($branch.id, newOwnership);
} else if (data instanceof DraggableFile) {
const newOwnership = filesToOwnership(data.files);
branchController.amendBranch($branch.id, newOwnership);
}
function onAmend(commit: Commit | RemoteCommit) {
return (data: any) => {
if (data instanceof DraggableHunk) {
const newOwnership = `${data.hunk.filePath}:${data.hunk.id}`;
branchController.amendBranch($branch.id, commit.id, newOwnership);
} else if (data instanceof DraggableFile) {
if (data.file instanceof LocalFile) {
// this is an uncommitted file change being amended to a previous commit
const newOwnership = filesToOwnership(data.files);
branchController.amendBranch($branch.id, commit.id, newOwnership);
} else if (data.file instanceof RemoteFile) {
// this is a file from a commit, rather than an uncommitted file
const newOwnership = filesToSimpleOwnership(data.files);
if (data.commit) {
branchController.moveCommitFile($branch.id, data.commit.id, commit.id, newOwnership);
}
}
}
};
}
function acceptSquash(commit: Commit | RemoteCommit) {
@ -104,7 +117,7 @@
active: 'amend-dz-active',
hover: 'amend-dz-hover',
accepts: acceptAmend(commit),
onDrop: onAmend
onDrop: onAmend(commit)
}}
use:dropzone={{
active: 'squash-dz-active',

View File

@ -4,7 +4,7 @@
import LargeDiffMessage from './LargeDiffMessage.svelte';
import { computeAddedRemovedByHunk } from '$lib/utils/metrics';
import { tooltip } from '$lib/utils/tooltip';
import { getLocalCommits } from '$lib/vbranches/contexts';
import { getLocalCommits, getRemoteCommits } from '$lib/vbranches/contexts';
import { getLockText } from '$lib/vbranches/tooltip';
import type { HunkSection, ContentSection } from '$lib/utils/fileSections';
@ -21,6 +21,9 @@
$: minWidth = getGutterMinWidth(maxLineNumber);
const localCommits = isFileLocked ? getLocalCommits() : undefined;
const remoteCommits = isFileLocked ? getRemoteCommits() : undefined;
const commits = isFileLocked ? ($localCommits || []).concat($remoteCommits || []) : undefined;
let alwaysShow = false;
function getGutterMinWidth(max: number) {
@ -52,10 +55,10 @@
<div class="indicators text-base-11">
<span class="added">+{added}</span>
<span class="removed">-{removed}</span>
{#if section.hunk.lockedTo && $localCommits}
{#if section.hunk.lockedTo && section.hunk.lockedTo.length > 0 && commits}
<div
use:tooltip={{
text: getLockText(section.hunk.lockedTo, $localCommits),
text: getLockText(section.hunk.lockedTo, commits),
delay: 500
}}
>

View File

@ -111,7 +111,7 @@
}
}}
use:draggable={{
data: new DraggableFile($branch?.id || '', file, selectedFiles),
data: new DraggableFile($branch?.id || '', file, $commit, selectedFiles),
disabled: readonly || isUnapplied,
viewportId: 'board-viewport',
selector: '.selected-draggable'

View File

@ -0,0 +1,90 @@
<script lang="ts">
	import Button from './Button.svelte';
	import { invoke } from '$lib/backend/ipc';
	import { getContext } from '$lib/utils/context';
	import { toHumanReadableTime } from '$lib/utils/time';
	import { VirtualBranchService } from '$lib/vbranches/virtualBranch';
	import { onDestroy, onMount } from 'svelte';

	// Project whose snapshot history is listed.
	export let projectId: string;

	// Maximum number of snapshots fetched per listing call.
	const snapshotsLimit = 30;

	const vbranchService = getContext(VirtualBranchService);
	// Refresh the snapshot list whenever the active branches change.
	// Keep the unsubscriber and release it on destroy — otherwise the
	// subscription leaks and keeps issuing IPC calls after unmount.
	const unsubscribeActiveBranches = vbranchService.activeBranches.subscribe(() => {
		listSnapshots(projectId, snapshotsLimit);
	});
	onDestroy(unsubscribeActiveBranches);

	// Shapes mirrored from the `list_snapshots` IPC response.
	type Trailer = {
		key: string;
		value: string;
	};
	type SnapshotDetails = {
		title: string;
		operation: string;
		body: string | undefined;
		trailers: Trailer[];
	};
	type Snapshot = {
		id: string;
		details: SnapshotDetails | undefined;
		createdAt: number;
	};
	let snapshots: Snapshot[] = [];

	// Fetches up to `limit` snapshots for `projectId` and replaces the list.
	async function listSnapshots(projectId: string, limit: number) {
		snapshots = await invoke<Snapshot[]>('list_snapshots', {
			projectId: projectId,
			limit: limit
		});
	}

	// Asks the backend to restore the working copy to snapshot `sha`.
	// The resulting branch change triggers the subscription above, which
	// re-lists snapshots.
	async function restoreSnapshot(projectId: string, sha: string) {
		await invoke<string>('restore_snapshot', {
			projectId: projectId,
			sha: sha
		});
	}

	onMount(async () => {
		listSnapshots(projectId, snapshotsLimit);
	});
</script>

<div class="container">
	{#each snapshots as entry, idx}
		<div class="card">
			<div class="entry">
				<div>
					{entry.details?.operation}
				</div>
				<div>
					<span>
						{toHumanReadableTime(entry.createdAt)}
					</span>
					<!-- The newest snapshot (idx 0) is the current state; restoring it is a no-op, so hide the button. -->
					{#if idx != 0}
						<Button
							style="pop"
							size="tag"
							icon="undo-small"
							on:click={async () => await restoreSnapshot(projectId, entry.id)}>restore</Button
						>
					{/if}
				</div>
			</div>
		</div>
	{/each}
</div>

<style>
	.container {
		width: 50rem;
		padding: 0.5rem;
		border-left-width: 1px;
		overflow-y: auto;
	}
	.entry {
		flex: auto;
		flex-direction: column;
	}
</style>

View File

@ -158,19 +158,19 @@
}
&.error {
background-color: var(--clr-theme-err-container);
background-color: var(--clr-theme-err-bg);
}
&.pop {
background-color: var(--clr-theme-pop-container);
background-color: var(--clr-theme-pop-bg);
}
&.warning {
background-color: var(--clr-theme-warn-container);
background-color: var(--clr-theme-warn-bg);
}
&.success {
background-color: var(--clr-theme-succ-container);
background-color: var(--clr-theme-succ-bg);
}
}

View File

@ -87,7 +87,7 @@
color: var(--clr-scale-ntrl-0);
gap: var(--size-12);
padding: var(--size-20);
background-color: var(--clr-theme-err-container);
background-color: var(--clr-theme-err-bg);
border-radius: var(--radius-m);
margin-bottom: var(--size-12);
}

View File

@ -93,11 +93,11 @@
}
.success {
background: var(--clr-theme-pop-container);
background: var(--clr-theme-pop-bg);
}
.error {
background: var(--clr-theme-warn-container);
background: var(--clr-theme-warn-bg);
}
.extra-padding {
padding: var(--size-20);

View File

@ -44,7 +44,7 @@
opacity: 0.5;
}
.success.setup-feature {
background: var(--clr-theme-pop-container, #f3fcfb);
background: var(--clr-theme-pop-bg);
}
.setup-feature__content {

View File

@ -40,7 +40,7 @@
color: var(--clr-scale-ntrl-0);
gap: var(--size-12);
padding: var(--size-20);
background-color: var(--clr-theme-err-container);
background-color: var(--clr-theme-err-bg);
border-radius: var(--radius-m);
margin-bottom: var(--size-12);
}

View File

@ -114,7 +114,7 @@
.pop {
&.soft {
color: var(--clr-theme-pop-on-container);
color: var(--clr-theme-pop-on-soft);
background: var(--clr-scale-pop-80);
/* if button */
&:not(.not-button, &:disabled):hover {
@ -134,7 +134,7 @@
.success {
&.soft {
color: var(--clr-theme-succ-on-container);
color: var(--clr-theme-succ-on-soft);
background: var(--clr-scale-succ-80);
/* if button */
&:not(.not-button, &:disabled):hover {
@ -154,7 +154,7 @@
.error {
&.soft {
color: var(--clr-theme-err-on-container);
color: var(--clr-theme-err-on-soft);
background: var(--clr-scale-err-80);
/* if button */
&:not(.not-button, &:disabled):hover {
@ -174,7 +174,7 @@
.warning {
&.soft {
color: var(--clr-theme-warn-on-container);
color: var(--clr-theme-warn-on-soft);
background: var(--clr-scale-warn-80);
/* if button */
&:not(.not-button, &:disabled):hover {
@ -194,7 +194,7 @@
.purple {
&.soft {
color: var(--clr-theme-purp-on-container);
color: var(--clr-theme-purp-on-soft);
background: var(--clr-scale-purp-80);
/* if button */
&:not(.not-button, &:disabled):hover {
@ -214,43 +214,15 @@
/* modifiers */
.not-button {
cursor: default;
user-select: none;
}
.disabled {
cursor: default;
pointer-events: none;
/* opacity: 0.5; */
opacity: 0.6;
}
&.neutral.solid,
&.pop.solid,
&.success.solid,
&.error.solid,
&.warning.solid,
&.purple.solid {
color: var(--clr-text-2);
background: oklch(from var(--clr-scale-ntrl-60) l c h / 0.15);
}
&.neutral.soft,
&.pop.soft,
&.success.soft,
&.error.soft,
&.warning.soft,
&.purple.soft {
color: var(--clr-text-2);
background: oklch(from var(--clr-scale-ntrl-60) l c h / 0.15);
}
&.ghost {
color: var(--clr-text-2);
}
&.ghost.solid {
border: 1px solid oklch(from var(--clr-scale-ntrl-0) l c h / 0.1);
}
.not-button {
cursor: default;
user-select: none;
}
.reversedDirection {

View File

@ -35,6 +35,15 @@ export function appErrorReportingEnabled() {
return persisted(true, 'appErrorReportingEnabled');
}
/**
* Provides a writable store for obtaining or setting the current state of non-anonemous application metrics.
* The setting can be enabled or disabled by setting the value of the store to true or false.
* @returns A writable store with the appNonAnonMetricsEnabled config.
*/
export function appNonAnonMetricsEnabled() {
return persisted(false, 'appNonAnonMetricsEnabled');
}
function persisted<T>(initial: T, key: string): Writable<T> & { onDisk: () => Promise<T> } {
async function setAndPersist(value: T, set: (value: T) => void) {
await store.set(key, value);

View File

@ -1,5 +1,5 @@
import { get, type Readable } from 'svelte/store';
import type { AnyFile, Commit, Hunk, RemoteCommit } from '../vbranches/types';
import type { AnyCommit, AnyFile, Commit, Hunk, RemoteCommit } from '../vbranches/types';
export function nonDraggable() {
return {
@ -18,7 +18,8 @@ export class DraggableHunk {
export class DraggableFile {
constructor(
public readonly branchId: string,
private file: AnyFile,
public file: AnyFile,
public commit: AnyCommit | undefined,
private selection: Readable<AnyFile[]> | undefined
) {}

View File

@ -22,7 +22,8 @@ export function showToast(toast: Toast) {
}
export function showError(title: string, err: any) {
if (err.message) showToast({ title, errorMessage: err.message, style: 'error' });
const errorMessage = err.message ? err.message : err;
showToast({ title, errorMessage: errorMessage, style: 'error' });
}
export function dismissToast(messageId: string | undefined) {

View File

@ -17,6 +17,7 @@ export interface Settings {
zoom: number;
scrollbarVisabilityOnHover: boolean;
tabSize: number;
showHistoryView: boolean;
}
const defaults: Settings = {
@ -31,7 +32,8 @@ const defaults: Settings = {
stashedBranchesHeight: 150,
zoom: 1,
scrollbarVisabilityOnHover: false,
tabSize: 4
tabSize: 4,
showHistoryView: false
};
export function loadUserSettings(): Writable<Settings> {

View File

@ -0,0 +1,4 @@
/**
 * Array#filter predicate that keeps only the first occurrence of each value.
 * `indexOf` always returns the index of the FIRST occurrence, so any later
 * duplicate fails the comparison and is dropped.
 */
export function unique(value: any, index: number, array: any[]) {
	const firstIndex = array.indexOf(value);
	return firstIndex === index;
}

View File

@ -1,3 +1,7 @@
export function isDefined<T>(file: T | undefined): file is T {
export function isDefined<T>(file: T | undefined | null): file is T {
return file !== undefined;
}
export function notNull<T>(file: T | undefined | null): file is T {
return file !== null;
}

View File

@ -293,11 +293,12 @@ export class BranchController {
}
}
async amendBranch(branchId: string, ownership: string) {
async amendBranch(branchId: string, commitOid: string, ownership: string) {
try {
await invoke<void>('amend_virtual_branch', {
projectId: this.projectId,
branchId,
commitOid,
ownership
});
} catch (err: any) {
@ -305,6 +306,76 @@ export class BranchController {
}
}
async moveCommitFile(
branchId: string,
fromCommitOid: string,
toCommitOid: string,
ownership: string
) {
try {
await invoke<void>('move_commit_file', {
projectId: this.projectId,
branchId,
fromCommitOid,
toCommitOid,
ownership
});
} catch (err: any) {
showError('Failed to amend commit', err);
}
}
async undoCommit(branchId: string, commitOid: string) {
try {
await invoke<void>('undo_commit', {
projectId: this.projectId,
branchId,
commitOid
});
} catch (err: any) {
showError('Failed to amend commit', err);
}
}
async updateCommitMessage(branchId: string, commitOid: string, message: string) {
try {
await invoke<void>('update_commit_message', {
projectId: this.projectId,
branchId,
commitOid,
message
});
} catch (err: any) {
showError('Failed to change commit message', err);
}
}
async insertBlankCommit(branchId: string, commitOid: string, offset: number) {
try {
await invoke<void>('insert_blank_commit', {
projectId: this.projectId,
branchId,
commitOid,
offset
});
} catch (err: any) {
showError('Failed to insert blank commit', err);
}
}
async reorderCommit(branchId: string, commitOid: string, offset: number) {
try {
await invoke<void>('reorder_commit', {
projectId: this.projectId,
branchId,
commitOid,
offset
});
} catch (err: any) {
showError('Failed to reorder blank commit', err);
}
}
async moveCommit(targetBranchId: string, commitOid: string) {
try {
await invoke<void>('move_commit', {

View File

@ -1,4 +1,4 @@
import type { Branch, AnyFile, Hunk, RemoteHunk } from './types';
import type { Branch, AnyFile, Hunk, RemoteHunk, RemoteFile } from './types';
export function filesToOwnership(files: AnyFile[]) {
return files
@ -6,6 +6,15 @@ export function filesToOwnership(files: AnyFile[]) {
.join('\n');
}
export function filesToSimpleOwnership(files: RemoteFile[]) {
return files
.map(
(f) =>
`${f.path}:${f.hunks.map(({ new_start, new_lines }) => `${new_start}-${new_start + new_lines}`).join(',')}`
)
.join('\n');
}
// These types help keep track of what maps to what.
// TODO: refactor code for clarity, these types should not be needed
export type AnyHunk = Hunk | RemoteHunk;

View File

@ -1,13 +1,17 @@
import type { Commit } from './types';
import { HunkLock, type Commit } from './types';
import { unique } from '$lib/utils/filters';
export function getLockText(commitId: string[] | string, commits: Commit[]): string {
if (!commitId || commits === undefined) return 'Depends on a committed change';
export function getLockText(hunkLocks: HunkLock | HunkLock[] | string, commits: Commit[]): string {
if (!hunkLocks || commits === undefined) return 'Depends on a committed change';
const lockedIds = typeof commitId == 'string' ? [commitId] : (commitId as string[]);
const locks = hunkLocks instanceof HunkLock ? [hunkLocks] : (hunkLocks as HunkLock[]);
const descriptions = lockedIds
.map((id) => {
const commit = commits.find((commit) => commit.id == id);
const descriptions = locks
.filter(unique)
.map((lock) => {
const commit = commits.find((c) => {
return c.id == lock.commitId;
});
const shortCommitId = commit?.id.slice(0, 7);
if (commit) {
const shortTitle = commit.descriptionTitle?.slice(0, 35) + '...';
@ -17,5 +21,13 @@ export function getLockText(commitId: string[] | string, commits: Commit[]): str
}
})
.join('\n');
return 'Locked due to dependency on:\n' + descriptions;
const branchCount = locks.map((lock) => lock.branchId).filter(unique).length;
if (branchCount > 1) {
return (
'Warning, undefined behavior due to lock on multiple branches!\n\n' +
'Locked because changes depend on:\n' +
descriptions
);
}
return 'Locked because changes depend on:\n' + descriptions;
}

View File

@ -1,6 +1,7 @@
import 'reflect-metadata';
import { splitMessage } from '$lib/utils/commitMessage';
import { hashCode } from '$lib/utils/string';
import { isDefined, notNull } from '$lib/utils/typeguards';
import { Type, Transform } from 'class-transformer';
export type ChangeType =
@ -21,8 +22,16 @@ export class Hunk {
filePath!: string;
hash?: string;
locked!: boolean;
lockedTo!: string | undefined;
@Type(() => HunkLock)
lockedTo!: HunkLock[];
changeType!: ChangeType;
new_start!: number;
new_lines!: number;
}
export class HunkLock {
branchId!: string;
commitId!: string;
}
export type AnyFile = LocalFile | RemoteFile;
@ -58,14 +67,15 @@ export class LocalFile {
get locked(): boolean {
return this.hunks
? this.hunks.map((hunk) => hunk.lockedTo).reduce((a, b) => !!(a || b), false)
? this.hunks.map((hunk) => hunk.locked).reduce((a, b) => !!(a || b), false)
: false;
}
get lockedIds(): string[] {
get lockedIds(): HunkLock[] {
return this.hunks
.map((hunk) => hunk.lockedTo)
.filter((lockedTo): lockedTo is string => !!lockedTo);
.flatMap((hunk) => hunk.lockedTo)
.filter(notNull)
.filter(isDefined);
}
}
@ -210,6 +220,8 @@ export const UNKNOWN_COMMITS = Symbol('UnknownCommits');
export class RemoteHunk {
diff!: string;
hash?: string;
new_start!: number;
new_lines!: number;
get id(): string {
return hashCode(this.diff);
@ -250,7 +262,7 @@ export class RemoteFile {
return this.hunks.map((h) => h.id);
}
get lockedIds(): string[] {
get lockedIds(): HunkLock[] {
return [];
}

View File

@ -67,6 +67,12 @@
hotkeys.on('Backspace', (e) => {
// This prevent backspace from navigating back
e.preventDefault();
}),
hotkeys.on('$mod+Shift+H', () => {
userSettings.update((s) => ({
...s,
showHistoryView: !$userSettings.showHistoryView
}));
})
);
});

View File

@ -1,13 +1,11 @@
import { AIService } from '$lib/ai/service';
import { initPostHog } from '$lib/analytics/posthog';
import { initSentry } from '$lib/analytics/sentry';
import { initAnalyticsIfEnabled } from '$lib/analytics/analytics';
import { AuthService } from '$lib/backend/auth';
import { GitConfigService } from '$lib/backend/gitConfigService';
import { HttpClient } from '$lib/backend/httpClient';
import { ProjectService } from '$lib/backend/projects';
import { PromptService } from '$lib/backend/prompt';
import { UpdaterService } from '$lib/backend/updater';
import { appMetricsEnabled, appErrorReportingEnabled } from '$lib/config/appSettings';
import { GitHubService } from '$lib/github/service';
import { UserService } from '$lib/stores/user';
import lscache from 'lscache';
@ -30,6 +28,7 @@ export async function load() {
if (env.PUBLIC_TESTING) {
mockTauri()
}
initAnalyticsIfEnabled();
appErrorReportingEnabled()
.onDisk()

View File

@ -2,11 +2,14 @@
import { Project } from '$lib/backend/projects';
import { syncToCloud } from '$lib/backend/sync';
import { BranchService } from '$lib/branches/service';
import History from '$lib/components/History.svelte';
import Navigation from '$lib/components/Navigation.svelte';
import NoBaseBranch from '$lib/components/NoBaseBranch.svelte';
import NotOnGitButlerBranch from '$lib/components/NotOnGitButlerBranch.svelte';
import ProblemLoadingRepo from '$lib/components/ProblemLoadingRepo.svelte';
import ProjectSettingsMenuAction from '$lib/components/ProjectSettingsMenuAction.svelte';
import { SETTINGS, type Settings } from '$lib/settings/userSettings';
import { getContextStoreBySymbol } from '$lib/utils/context';
import * as hotkeys from '$lib/utils/hotkeys';
import { unsubscribe } from '$lib/utils/unsubscribe';
import { BaseBranchService, NoDefaultTarget } from '$lib/vbranches/baseBranch';
@ -33,6 +36,7 @@
$: baseBranch = baseBranchService.base;
$: baseError = baseBranchService.error;
$: projectError = projectService.error;
const userSettings = getContextStoreBySymbol<Settings>(SETTINGS);
$: setContext(VirtualBranchService, vbranchService);
$: setContext(BranchController, branchController);
@ -90,6 +94,9 @@
<div class="view-wrap" role="group" on:dragover|preventDefault>
<Navigation />
<slot />
{#if $userSettings.showHistoryView}
<History {projectId} />
{/if}
</div>
{/if}
{/key}

View File

@ -1,26 +0,0 @@
[package]
name = "gitbutler-analytics"
version = "0.0.0"
edition = "2021"
publish = false
[lib]
doctest = false
test = false
[dependencies]
gitbutler-core.workspace = true
thiserror.workspace = true
tracing = "0.1.40"
tokio.workspace = true
serde.workspace = true
serde_json = { version = "1.0", features = [ "std", "arbitrary_precision" ] }
async-trait = "0.1.79"
chrono = { version = "0.4.37", features = ["serde"] }
reqwest = { version = "0.12.2", features = ["json"] }
[lints.clippy]
all = "deny"
perf = "deny"
correctness = "deny"

View File

@ -1,100 +0,0 @@
//! A client to provide analytics.
use std::{fmt, str, sync::Arc};
use gitbutler_core::{projects::ProjectId, users::User};
mod posthog;
pub struct Config<'c> {
pub posthog_token: Option<&'c str>,
}
/// An analytics event emitted by the application.
#[derive(Debug, Clone, PartialEq)]
pub enum Event {
/// The checked-out git reference of a project changed.
HeadChange {
project_id: ProjectId,
reference_name: String,
},
}
impl fmt::Display for Event {
    /// Human-readable rendering of the event and its payload.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Event::HeadChange {
                project_id,
                reference_name,
            } => write!(
                f,
                "HeadChange(project_id: {project_id}, reference_name: {reference_name})"
            ),
        }
    }
}
impl Event {
/// The project this event belongs to.
pub fn project_id(&self) -> ProjectId {
match self {
Event::HeadChange { project_id, .. } => *project_id,
}
}
/// Convert this event into its PostHog representation, attributing it to
/// `user` via a `user_<id>` distinct id and prefixing the project id with
/// `project_`.
fn into_posthog_event(self, user: &User) -> posthog::Event {
match self {
Event::HeadChange {
project_id,
reference_name: reference,
} => {
let mut event =
posthog::Event::new("git::head_changed", &format!("user_{}", user.id));
event.insert_prop("project_id", format!("project_{}", project_id));
event.insert_prop("reference", reference);
event
}
}
}
}
/// NOTE: Needs to be `Clone` only because the watcher wants to obtain it from `tauri`.
/// It's just for dependency injection.
#[derive(Clone)]
pub struct Client {
client: Arc<dyn posthog::Client + Sync + Send>,
}
impl Client {
/// Create a client that sends to PostHog when `config.posthog_token` is
/// set, and a discarding mock client otherwise.
pub fn new(app_name: String, app_version: String, config: &Config) -> Self {
let client: Arc<dyn posthog::Client + Sync + Send> =
if let Some(posthog_token) = config.posthog_token {
let real = posthog::real::Client::new(posthog::real::ClientOptions {
api_key: posthog_token.to_string(),
app_name,
app_version,
});
// Wrap the real client so batches that failed to send are
// retried together with the next capture call.
let real_with_retry = posthog::retry::Client::new(real);
Arc::new(real_with_retry)
} else {
Arc::<posthog::mock::Client>::default()
};
Client { client }
}
/// Send `event` to analytics and associate it with `user` without blocking.
pub fn send_non_anonymous_event_nonblocking(&self, user: &User, event: &Event) {
let client = self.client.clone();
let event = event.clone().into_posthog_event(user);
// Fire-and-forget: run on the tokio runtime and only log failures.
tokio::spawn(async move {
if let Err(error) = client.capture(&[event]).await {
tracing::warn!(?error, "failed to send analytics");
}
});
}
}
}
impl Default for Client {
fn default() -> Self {
Self {
client: Arc::new(posthog::mock::Client),
}
}
}

View File

@ -1,67 +0,0 @@
pub mod mock;
pub mod real;
pub mod retry;
use std::collections::HashMap;
use async_trait::async_trait;
use chrono::NaiveDateTime;
use serde::Serialize;
#[async_trait]
pub trait Client {
async fn capture(&self, events: &[Event]) -> Result<(), Error>;
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("{code}: {message}")]
BadRequest { code: u16, message: String },
#[error("Connection error: {0}")]
Connection(#[from] reqwest::Error),
#[error("Serialization error: {0}")]
Serialization(#[from] serde_json::Error),
}
#[derive(Serialize, Debug, PartialEq, Eq, Clone)]
pub struct Event {
event: String,
properties: Properties,
timestamp: Option<NaiveDateTime>,
}
#[derive(Clone, Serialize, Debug, PartialEq, Eq)]
pub struct Properties {
distinct_id: String,
props: HashMap<String, serde_json::Value>,
}
impl Properties {
/// Create an empty property set attributed to `distinct_id`.
fn new<S: Into<String>>(distinct_id: S) -> Self {
Self {
distinct_id: distinct_id.into(),
props: HashMap::default(),
}
}
/// Serialize `prop` to JSON and store it under `key`, replacing any
/// previous value. Panics (via `expect`) if `prop` fails to serialize.
pub fn insert<K: Into<String>, P: Serialize>(&mut self, key: K, prop: P) {
let as_json =
serde_json::to_value(prop).expect("safe serialization of a analytics property");
let _ = self.props.insert(key.into(), as_json);
}
}
impl Event {
/// Create a new event named `event`, attributed to `distinct_id`, with no
/// extra properties and no explicit timestamp.
pub fn new<S: Into<String>>(event: S, distinct_id: S) -> Self {
Self {
event: event.into(),
properties: Properties::new(distinct_id),
timestamp: None,
}
}
/// Panics if `prop` fails to serialize (via `expect` in `Properties::insert`).
pub fn insert_prop<K: Into<String>, P: Serialize>(&mut self, key: K, prop: P) {
self.properties.insert(key, prop);
}
}

View File

@ -1,13 +0,0 @@
use async_trait::async_trait;
use tracing::instrument;
/// A no-op analytics client: accepts any events and discards them.
#[derive(Default)]
pub struct Client;
#[async_trait]
impl super::Client for Client {
#[instrument(skip(self), level = "debug")]
async fn capture(&self, _events: &[super::Event]) -> Result<(), super::Error> {
// Intentionally drop the events and report success.
Ok(())
}
}

View File

@ -1,96 +0,0 @@
use std::time::Duration;
use async_trait::async_trait;
use chrono::NaiveDateTime;
use reqwest::{header::CONTENT_TYPE, Client as HttpClient};
use serde::Serialize;
use tracing::instrument;
const API_ENDPOINT: &str = "https://eu.posthog.com/batch/";
const TIMEOUT: &Duration = &Duration::from_millis(800);
pub struct ClientOptions {
pub app_name: String,
pub app_version: String,
pub api_key: String,
}
pub struct Client {
options: ClientOptions,
client: HttpClient,
}
impl Client {
/// Create a PostHog HTTP client; requests use the shared `TIMEOUT`
/// (800ms at the time of writing).
pub fn new<C: Into<ClientOptions>>(options: C) -> Self {
let client = HttpClient::builder().timeout(*TIMEOUT).build().unwrap(); // Unwrap here is as safe as `HttpClient::new`
Client {
options: options.into(),
client,
}
}
}
#[async_trait]
impl super::Client for Client {
#[instrument(skip(self), level = "debug")]
async fn capture(&self, events: &[super::Event]) -> Result<(), super::Error> {
// Tag every event with the app name and version before sending.
let events = events
.iter()
.map(|event| {
let event = &mut event.clone();
event
.properties
.insert("appName", self.options.app_name.clone());
event
.properties
.insert("appVersion", self.options.app_version.clone());
Event::from(event)
})
.collect::<Vec<_>>();
let batch = Batch {
api_key: &self.options.api_key,
batch: events.as_slice(),
};
// POST the whole batch as one JSON document to the PostHog batch endpoint.
let response = self
.client
.post(API_ENDPOINT)
.header(CONTENT_TYPE, "application/json")
.body(serde_json::to_string(&batch)?)
.send()
.await?;
if response.status().is_success() {
Ok(())
} else {
// Surface non-2xx responses with the status code and response body.
Err(super::Error::BadRequest {
code: response.status().as_u16(),
message: response.text().await.unwrap_or_default(),
})
}
}
}
#[derive(Serialize)]
struct Batch<'a> {
api_key: &'a str,
batch: &'a [Event],
}
#[derive(Serialize)]
struct Event {
event: String,
properties: super::Properties,
timestamp: Option<NaiveDateTime>,
}
impl From<&mut super::Event> for Event {
fn from(event: &mut super::Event) -> Self {
Self {
event: event.event.clone(),
properties: event.properties.clone(),
timestamp: event.timestamp,
}
}
}

View File

@ -1,118 +0,0 @@
use std::sync::Arc;
use async_trait::async_trait;
use tokio::sync::Mutex;
use tracing::instrument;
/// Decorates another analytics client with retry behavior: events that
/// failed to send are kept and re-sent together with the next batch.
#[derive(Clone)]
pub struct Client<T: super::Client + Sync> {
inner: T,
/// Events that failed to be sent
/// and are waiting to be retried.
batch: Arc<Mutex<Vec<super::Event>>>,
}
impl<T: super::Client + Sync> Client<T> {
/// Wrap `inner`, starting with an empty retry backlog.
pub fn new(inner: T) -> Self {
Client {
inner,
batch: Arc::new(Mutex::new(Vec::new())),
}
}
}
#[async_trait]
impl<T: super::Client + Sync> super::Client for Client<T> {
#[instrument(skip(self), level = "debug")]
async fn capture(&self, events: &[super::Event]) -> Result<(), super::Error> {
// Append the new events to any previously failed ones, then try to
// flush the whole backlog in a single batch.
let mut batch = self.batch.lock().await;
batch.extend_from_slice(events);
if let Err(error) = self.inner.capture(&batch).await {
// Keep the batch for the next call; this method itself never errors.
tracing::warn!("Failed to send analytics: {}", error);
} else {
batch.clear();
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use super::{super::Client, *};
#[derive(Clone)]
struct MockClient {
sent: Arc<AtomicUsize>,
is_failing: Arc<AtomicBool>,
}
impl MockClient {
fn new() -> Self {
MockClient {
sent: Arc::new(AtomicUsize::new(0)),
is_failing: Arc::new(AtomicBool::new(false)),
}
}
fn set_failing(&self, is_failing: bool) {
self.is_failing.store(is_failing, Ordering::SeqCst);
}
fn get_sent(&self) -> usize {
self.sent.load(Ordering::SeqCst)
}
}
#[async_trait]
impl super::super::Client for MockClient {
async fn capture(&self, events: &[super::super::Event]) -> Result<(), super::super::Error> {
if self.is_failing.load(Ordering::SeqCst) {
Err(super::super::Error::BadRequest {
code: 400,
message: "Bad request".to_string(),
})
} else {
self.sent.fetch_add(events.len(), Ordering::SeqCst);
Ok(())
}
}
}
#[tokio::test]
async fn retry() {
let inner_client = MockClient::new();
let retry_client = super::Client::new(inner_client.clone());
inner_client.set_failing(true);
retry_client
.capture(&[super::super::Event::new("test", "test")])
.await
.unwrap();
assert_eq!(inner_client.get_sent(), 0);
retry_client
.capture(&[super::super::Event::new("test", "test")])
.await
.unwrap();
assert_eq!(inner_client.get_sent(), 0);
inner_client.set_failing(false);
retry_client
.capture(&[super::super::Event::new("test", "test")])
.await
.unwrap();
assert_eq!(inner_client.get_sent(), 3);
retry_client
.capture(&[super::super::Event::new("test", "test")])
.await
.unwrap();
assert_eq!(inner_client.get_sent(), 4);
}
}

View File

@ -43,15 +43,15 @@ fn score_ignores_whitespace() {
assert_score!(sig, "\t\t hel lo\n\two rld \t\t", 1.0);
}
const TEXT1: &str = include_str!("../fixtures/text1.txt");
const TEXT2: &str = include_str!("../fixtures/text2.txt");
const TEXT3: &str = include_str!("../fixtures/text3.txt");
const CODE1: &str = include_str!("../fixtures/code1.txt");
const CODE2: &str = include_str!("../fixtures/code2.txt");
const CODE3: &str = include_str!("../fixtures/code3.txt");
const CODE4: &str = include_str!("../fixtures/code4.txt");
const LARGE1: &str = include_str!("../fixtures/large1.txt");
const LARGE2: &str = include_str!("../fixtures/large2.txt");
const TEXT1: &str = include_str!("fixtures/text1.txt");
const TEXT2: &str = include_str!("fixtures/text2.txt");
const TEXT3: &str = include_str!("fixtures/text3.txt");
const CODE1: &str = include_str!("fixtures/code1.txt");
const CODE2: &str = include_str!("fixtures/code2.txt");
const CODE3: &str = include_str!("fixtures/code3.txt");
const CODE4: &str = include_str!("fixtures/code4.txt");
const LARGE1: &str = include_str!("fixtures/large1.txt");
const LARGE2: &str = include_str!("fixtures/large2.txt");
macro_rules! real_test {
($a: ident, $b: ident, are_similar) => {

View File

@ -8,7 +8,9 @@ publish = false
[dev-dependencies]
once_cell = "1.19"
pretty_assertions = "1.4"
tempfile = "3.10"
gitbutler-testsupport.workspace = true
gitbutler-git = { workspace = true, features = ["test-askpass-path" ]}
[dependencies]
toml = "0.8.12"
@ -43,6 +45,7 @@ similar = { version = "2.5.0", features = ["unicode"] }
slug = "0.1.5"
ssh-key = { version = "0.6.5", features = [ "alloc", "ed25519" ] }
ssh2 = { version = "0.9.4", features = ["vendored-openssl"] }
strum = { version = "0.26", features = ["derive"] }
log = "^0.4"
thiserror.workspace = true
tokio = { workspace = true, features = [ "rt-multi-thread", "rt", "macros" ] }
@ -52,7 +55,6 @@ urlencoding = "2.1.3"
uuid.workspace = true
walkdir = "2.5.0"
zip = "0.6.5"
tempfile = "3.10"
gitbutler-git.workspace = true
[features]

View File

@ -93,15 +93,12 @@ impl Proxy {
async fn proxy_author(&self, author: Author) -> Author {
Author {
gravatar_url: self
.proxy(&author.gravatar_url)
.await
.unwrap_or_else(|error| {
tracing::error!(gravatar_url = %author.gravatar_url, ?error, "failed to proxy gravatar url");
author.gravatar_url
}),
..author
}
gravatar_url: self.proxy(&author.gravatar_url).await.unwrap_or_else(|error| {
tracing::error!(gravatar_url = %author.gravatar_url, ?error, "failed to proxy gravatar url");
author.gravatar_url
}),
..author
}
}
async fn proxy_remote_commit(&self, commit: RemoteCommit) -> RemoteCommit {

View File

@ -1,8 +1,11 @@
use std::io::Write;
use std::path::{Path, PathBuf};
use anyhow::Result;
use bstr::BString;
use gix::dir::walk::EmissionMode;
use gix::tempfile::create_dir::Retries;
use gix::tempfile::{AutoRemove, ContainingDirectory};
use walkdir::WalkDir;
// Returns an ordered list of relative paths for files inside a directory recursively.
@ -48,3 +51,71 @@ pub fn iter_worktree_files(
.filter_map(Result::ok)
.map(|e| e.entry.rela_path))
}
/// Write a single file so that the write either fully succeeds, or fully fails,
/// assuming the containing directory already exists.
///
/// Panics if `file_path` has no parent directory component.
pub(crate) fn write<P: AsRef<Path>>(
file_path: P,
contents: impl AsRef<[u8]>,
) -> anyhow::Result<()> {
#[cfg(windows)]
{
// NOTE(review): the Windows path is a plain, non-atomic write —
// presumably to sidestep tempfile-persist issues on Windows; confirm.
Ok(std::fs::write(file_path, contents)?)
}
#[cfg(not(windows))]
{
// Write to a tempfile next to the target, then persist (rename) it
// into place so readers never observe a partially written file.
let mut temp_file = gix::tempfile::new(
file_path.as_ref().parent().unwrap(),
ContainingDirectory::Exists,
AutoRemove::Tempfile,
)?;
temp_file.write_all(contents.as_ref())?;
Ok(persist_tempfile(temp_file, file_path)?)
}
}
/// Write a single file so that the write either fully succeeds, or fully fails,
/// and create all leading directories.
///
/// Panics if `file_path` has no parent directory component.
pub(crate) fn create_dirs_then_write<P: AsRef<Path>>(
file_path: P,
contents: impl AsRef<[u8]>,
) -> std::io::Result<()> {
#[cfg(windows)]
{
// NOTE(review): the Windows path is a plain, non-atomic write —
// presumably to sidestep tempfile-persist issues on Windows; confirm.
let dir = file_path.as_ref().parent().unwrap();
if !dir.exists() {
std::fs::create_dir_all(dir)?;
}
std::fs::write(file_path, contents)
}
#[cfg(not(windows))]
{
// Create leading directories race-proof, write the contents to a
// tempfile, then persist (rename) it into place.
let mut temp_file = gix::tempfile::new(
file_path.as_ref().parent().unwrap(),
ContainingDirectory::CreateAllRaceProof(Retries::default()),
AutoRemove::Tempfile,
)?;
temp_file.write_all(contents.as_ref())?;
persist_tempfile(temp_file, file_path)
}
}
/// Move `tempfile` into its final location at `to_path`.
///
/// On Windows the persisted file is additionally flushed to disk via
/// `sync_all` (see the experiment note below).
fn persist_tempfile(
tempfile: gix::tempfile::Handle<gix::tempfile::handle::Writable>,
to_path: impl AsRef<Path>,
) -> std::io::Result<()> {
match tempfile.persist(to_path) {
Ok(Some(_opened_file)) => {
// EXPERIMENT: Does this fix #3601?
#[cfg(windows)]
_opened_file.sync_all()?;
Ok(())
}
// `persist` returns `None` only when a signal removed the tempfile,
// which cannot happen without a signal handler installed.
Ok(None) => unreachable!(
"BUG: a signal has caused the tempfile to be removed, but we didn't install a handler"
),
Err(err) => Err(err.error),
}
}

View File

@ -217,7 +217,8 @@ impl Repository {
// Push to the remote
remote
.push(&[&remote_refspec], Some(&mut push_options)).map_err(|error| match error {
.push(&[&remote_refspec], Some(&mut push_options))
.map_err(|error| match error {
git::Error::Network(error) => {
tracing::warn!(project_id = %self.project.id, error = %error, "failed to push gb repo");
RemoteError::Network

View File

@ -113,7 +113,8 @@ impl Helper {
}
}
pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Self {
pub fn from_path(path: impl Into<PathBuf>) -> Self {
let path = path.into();
let keys = keys::Controller::from_path(&path);
let users = users::Controller::from_path(path);
let home_dir = std::env::var_os("HOME").map(PathBuf::from);

View File

@ -9,7 +9,6 @@ use tracing::instrument;
use super::Repository;
use crate::git;
use crate::virtual_branches::BranchStatus;
pub type DiffByPathMap = HashMap<PathBuf, FileDiff>;
@ -53,6 +52,7 @@ pub struct GitHunk {
#[serde(rename = "diff", serialize_with = "crate::serde::as_string_lossy")]
pub diff_lines: BString,
pub binary: bool,
pub locked_to: Box<[HunkLock]>,
pub change_type: ChangeType,
}
@ -69,6 +69,7 @@ impl GitHunk {
diff_lines: hex_id.into(),
binary: true,
change_type,
locked_to: Box::new([]),
}
}
@ -82,6 +83,7 @@ impl GitHunk {
diff_lines: Default::default(),
binary: false,
change_type: ChangeType::Modified,
locked_to: Box::new([]),
}
}
}
@ -91,6 +93,21 @@ impl GitHunk {
pub fn contains(&self, line: u32) -> bool {
self.new_start <= line && self.new_start + self.new_lines >= line
}
pub fn with_locks(mut self, locks: &[HunkLock]) -> Self {
self.locked_to = locks.to_owned().into();
self
}
}
// A hunk is locked when it depends on changes in commits that are in your
// workspace. A hunk can be locked to more than one branch if it overlaps
// with more than one committed hunk.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Copy)]
#[serde(rename_all = "camelCase")]
pub struct HunkLock {
pub branch_id: uuid::Uuid,
pub commit_id: git::Oid,
}
#[derive(Debug, PartialEq, Clone, Serialize, Default)]
@ -298,6 +315,7 @@ fn hunks_by_filepath(repo: Option<&Repository>, diff: &git2::Diff) -> Result<Dif
diff_lines: line.into_owned(),
binary: false,
change_type,
locked_to: Box::new([]),
}
}
LineOrHexHash::HexHashOfBinaryBlob(id) => {
@ -404,12 +422,13 @@ pub fn reverse_hunk(hunk: &GitHunk) -> Option<GitHunk> {
diff_lines: diff,
binary: hunk.binary,
change_type: hunk.change_type,
locked_to: Box::new([]),
})
}
}
// TODO(ST): turning this into an iterator will trigger a cascade of changes that
// mean less unnecessary copies. It also leads to `virtual.rs` - 4k SLOC!
pub fn diff_files_into_hunks(files: DiffByPathMap) -> BranchStatus {
HashMap::from_iter(files.into_iter().map(|(path, file)| (path, file.hunks)))
pub fn diff_files_into_hunks(
files: DiffByPathMap,
) -> impl Iterator<Item = (PathBuf, Vec<GitHunk>)> {
files.into_iter().map(|(path, file)| (path, file.hunks))
}

View File

@ -20,6 +20,8 @@ pub enum Error {
Hooks(#[from] git2_hooks::HooksError),
#[error("http error: {0}")]
Http(git2::Error),
#[error("blame error: {0}")]
Blame(git2::Error),
#[error("checkout error: {0}")]
Checkout(git2::Error),
#[error(transparent)]

View File

@ -64,6 +64,8 @@ impl FromStr for Refname {
return Err(Error::NotRemote(value.to_string()));
};
// TODO(ST): use `gix` (which respects refspecs and settings) to do this transformation
// Alternatively, `git2` also has support for respecting refspecs.
let value = value.strip_prefix("refs/remotes/").unwrap();
if let Some((remote, branch)) = value.split_once('/') {

View File

@ -1,6 +1,6 @@
use std::{io::Write, path::Path, str};
use git2::Submodule;
use git2::{BlameOptions, Submodule};
use git2_hooks::HookResult;
use super::{
@ -478,6 +478,24 @@ impl Repository {
git2_hooks::hooks_post_commit(&self.0, Some(&["../.husky"]))?;
Ok(())
}
/// Blame `path`, limited to lines `min_line..=max_line` and to commits
/// between `oldest_commit` and `newest_commit` (both set on the
/// `git2::BlameOptions`). Failures are wrapped in `Error::Blame`.
pub fn blame(
&self,
path: &Path,
min_line: u32,
max_line: u32,
oldest_commit: &Oid,
newest_commit: &Oid,
) -> Result<git2::Blame> {
let mut opts = BlameOptions::new();
opts.min_line(min_line as usize)
.max_line(max_line as usize)
.newest_commit(git2::Oid::from(*newest_commit))
.oldest_commit(git2::Oid::from(*oldest_commit));
self.0
.blame_file(path, Some(&mut opts))
.map_err(super::Error::Blame)
}
}
pub struct CheckoutTreeBuidler<'a> {

View File

@ -1,4 +1,5 @@
use anyhow::Context;
use std::path::PathBuf;
use super::{storage::Storage, PrivateKey};
@ -12,7 +13,7 @@ impl Controller {
Self { storage }
}
pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Self {
pub fn from_path(path: impl Into<PathBuf>) -> Self {
Self::new(Storage::from_path(path))
}

View File

@ -1,42 +1,40 @@
use super::PrivateKey;
use crate::storage;
use std::path::PathBuf;
// TODO(ST): get rid of this type, it's more trouble than it's worth.
#[derive(Clone)]
pub struct Storage {
storage: storage::Storage,
inner: storage::Storage,
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("IO error: {0}")]
Storage(#[from] storage::Error),
#[error(transparent)]
Storage(#[from] std::io::Error),
#[error("SSH key error: {0}")]
SSHKey(#[from] ssh_key::Error),
}
impl Storage {
pub fn new(storage: storage::Storage) -> Storage {
Storage { storage }
Storage { inner: storage }
}
pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Storage {
pub fn from_path(path: impl Into<PathBuf>) -> Storage {
Storage::new(storage::Storage::new(path))
}
pub fn get(&self) -> Result<Option<PrivateKey>, Error> {
self.storage
.read("keys/ed25519")
.map_err(Error::Storage)
.and_then(|s| s.map(|s| s.parse().map_err(Error::SSHKey)).transpose())
let key = self.inner.read("keys/ed25519")?;
key.map(|s| s.parse().map_err(Into::into)).transpose()
}
// TODO(ST): see if Key should rather deal with bytes instead for this kind of serialization.
pub fn create(&self, key: &PrivateKey) -> Result<(), Error> {
self.storage
.write("keys/ed25519", &key.to_string())
.map_err(Error::Storage)?;
self.storage
.write("keys/ed25519.pub", &key.public_key().to_string())
.map_err(Error::Storage)?;
self.inner.write("keys/ed25519", &key.to_string())?;
self.inner
.write("keys/ed25519.pub", &key.public_key().to_string())?;
Ok(())
}
}

View File

@ -30,6 +30,7 @@ pub mod project_repository;
pub mod projects;
pub mod reader;
pub mod sessions;
pub mod snapshots;
pub mod ssh;
pub mod storage;
pub mod types;

View File

@ -95,6 +95,8 @@ pub fn conflicting_files(repository: &Repository) -> Result<Vec<String>> {
Ok(reader.lines().map_while(Result::ok).collect())
}
/// Check if `path` is conflicting in `repository`, or if `None`, check if there is any conflict.
// TODO(ST): Should this not rather check the conflicting state in the index?
pub fn is_conflicting<P: AsRef<Path>>(repository: &Repository, path: Option<P>) -> Result<bool> {
let conflicts_path = repository.git_repository.path().join("conflicts");
if !conflicts_path.exists() {
@ -105,6 +107,7 @@ pub fn is_conflicting<P: AsRef<Path>>(repository: &Repository, path: Option<P>)
let reader = std::io::BufReader::new(file);
let mut files = reader.lines().map_ok(PathBuf::from);
if let Some(pathname) = path {
// TODO(ST): This shouldn't work on UTF8 strings.
let pathname = pathname.as_ref();
// check if pathname is one of the lines in conflicts_path file

View File

@ -61,7 +61,7 @@ impl Repository {
// XXX(qix-): We will ultimately move away from an internal repository for a variety
// XXX(qix-): of reasons, but for now, this is a simple, short-term solution that we
// XXX(qix-): can clean up later on. We're aware this isn't ideal.
if let Ok(config) = git_repository.config().as_mut(){
if let Ok(config) = git_repository.config().as_mut() {
let should_set = match config.get_bool("gitbutler.didSetPrune") {
Ok(None | Some(false)) => true,
Ok(Some(true)) => false,
@ -76,7 +76,10 @@ impl Repository {
};
if should_set {
if let Err(error) = config.set_str("gc.pruneExpire", "never").and_then(|()| config.set_bool("gitbutler.didSetPrune", true)) {
if let Err(error) = config
.set_str("gc.pruneExpire", "never")
.and_then(|()| config.set_bool("gitbutler.didSetPrune", true))
{
tracing::warn!(
"failed to set gc.auto to false for repository at {}; cannot disable gc: {}",
project.path.display(),
@ -623,6 +626,8 @@ pub enum RemoteError {
Network,
#[error("authentication failed")]
Auth,
#[error("Git failed")]
Git(#[from] git::Error),
#[error(transparent)]
Other(#[from] anyhow::Error),
}
@ -638,6 +643,9 @@ impl ErrorWithContext for RemoteError {
Code::ProjectGitAuth,
"Project remote authentication error",
),
RemoteError::Git(_) => {
error::Context::new_static(Code::ProjectGitRemote, "Git command failed")
}
RemoteError::Other(error) => {
return error.custom_context_or_root_cause().into();
}

View File

@ -46,12 +46,12 @@ impl Controller {
}
}
pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Self {
let pathbuf = path.as_ref().to_path_buf();
pub fn from_path(path: impl Into<PathBuf>) -> Self {
let path = path.into();
Self {
local_data_dir: pathbuf.clone(),
projects_storage: storage::Storage::from_path(&pathbuf),
users: users::Controller::from_path(&pathbuf),
projects_storage: storage::Storage::from_path(&path),
users: users::Controller::from_path(&path),
local_data_dir: path,
watchers: None,
}
}
@ -261,10 +261,9 @@ impl Controller {
tracing::error!(project_id = %project.id, ?error, "failed to remove .git/gitbutler.json data",);
}
let virtual_branches_path = project.path.join(".git/virtual_branches.toml");
if virtual_branches_path.exists() {
if let Err(error) = std::fs::remove_file(virtual_branches_path) {
tracing::error!(project_id = %project.id, ?error, "failed to remove .git/virtual_branches.toml data",);
if project.gb_dir().exists() {
if let Err(error) = std::fs::remove_dir_all(project.gb_dir()) {
tracing::error!(project_id = %project.id, ?error, "failed to remove {:?} on project delete", project.gb_dir());
}
}

View File

@ -10,10 +10,9 @@ use crate::{git, id::Id, types::default_true::DefaultTrue};
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub enum AuthKey {
#[cfg_attr(not(windows), default)]
Default,
Generated,
#[cfg_attr(windows, default)]
#[default]
SystemExecutable,
GitCredentialsHelper,
Local {
@ -83,6 +82,8 @@ pub struct Project {
pub project_data_last_fetch: Option<FetchResult>,
#[serde(default)]
pub omit_certificate_check: Option<bool>,
#[serde(default)]
pub enable_snapshots: Option<bool>,
}
impl AsRef<Project> for Project {

View File

@ -1,4 +1,5 @@
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use crate::{
projects::{project, ProjectId},
@ -9,7 +10,7 @@ const PROJECTS_FILE: &str = "projects.json";
#[derive(Debug, Clone)]
pub struct Storage {
storage: storage::Storage,
inner: storage::Storage,
}
#[derive(Debug, Serialize, Deserialize, Default, Clone)]
@ -30,7 +31,7 @@ pub struct UpdateRequest {
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error(transparent)]
Storage(#[from] storage::Error),
Storage(#[from] std::io::Error),
#[error(transparent)]
Json(#[from] serde_json::Error),
#[error("project not found")]
@ -38,16 +39,16 @@ pub enum Error {
}
impl Storage {
pub fn new(storage: storage::Storage) -> Storage {
Storage { storage }
pub fn new(storage: storage::Storage) -> Self {
Self { inner: storage }
}
pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Storage {
Storage::new(storage::Storage::new(path))
pub fn from_path(path: impl Into<PathBuf>) -> Self {
Self::new(storage::Storage::new(path))
}
pub fn list(&self) -> Result<Vec<project::Project>, Error> {
match self.storage.read(PROJECTS_FILE)? {
match self.inner.read(PROJECTS_FILE)? {
Some(projects) => {
let all_projects: Vec<project::Project> = serde_json::from_str(&projects)?;
let all_projects: Vec<project::Project> = all_projects
@ -128,7 +129,7 @@ impl Storage {
project.omit_certificate_check = Some(omit_certificate_check);
}
self.storage
self.inner
.write(PROJECTS_FILE, &serde_json::to_string_pretty(&projects)?)?;
Ok(projects
@ -142,7 +143,7 @@ impl Storage {
let mut projects = self.list()?;
if let Some(index) = projects.iter().position(|p| p.id == *id) {
projects.remove(index);
self.storage
self.inner
.write(PROJECTS_FILE, &serde_json::to_string_pretty(&projects)?)?;
}
Ok(())
@ -152,7 +153,7 @@ impl Storage {
let mut projects = self.list()?;
projects.push(project.clone());
let projects = serde_json::to_string_pretty(&projects)?;
self.storage.write(PROJECTS_FILE, &projects)?;
self.inner.write(PROJECTS_FILE, &projects)?;
Ok(())
}
}

View File

@ -0,0 +1,322 @@
use anyhow::anyhow;
use anyhow::Result;
use itertools::Itertools;
use serde::Deserialize;
use std::fmt;
use std::fmt::Display;
use std::fmt::Formatter;
use std::str::FromStr;
use strum::EnumString;
use serde::Serialize;
/// A snapshot of the repository and virtual branches state that GitButler can restore to.
/// It captures the state of the working directory, virtual branches and commits.
#[derive(Debug, PartialEq, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Snapshot {
/// The sha of the commit that represents the snapshot
pub id: String,
/// Snapshot creation time in epoch milliseconds
pub created_at: i64,
/// Snapshot details as persisted in the commit message,
/// or `None` if the commit message could not be parsed.
pub details: Option<SnapshotDetails>,
}
/// The payload of a snapshot commit
///
/// This is persisted as a commit message in the title, body and trailers format (https://git-scm.com/docs/git-interpret-trailers)
#[derive(Debug, PartialEq, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SnapshotDetails {
/// The version of the snapshot format
pub version: Version,
/// The type of operation that was performed just before the snapshot was created
pub operation: OperationType,
/// The title / label of the snapshot
pub title: String,
/// Additional text describing the snapshot
pub body: Option<String>,
/// Additional key value pairs that describe the snapshot
pub trailers: Vec<Trailer>,
}
impl SnapshotDetails {
pub fn new(operation: OperationType) -> Self {
let title = operation.to_string();
SnapshotDetails {
version: Default::default(),
operation,
title,
body: None,
trailers: vec![],
}
}
}
impl FromStr for SnapshotDetails {
    type Err = anyhow::Error;

    /// Parses a snapshot commit message of the shape:
    /// title, blank line, optional body paragraphs, blank line, trailers.
    ///
    /// The `Version` trailer is required; a missing or unrecognized
    /// `Operation` value falls back to `OperationType::Unknown`. Both are
    /// removed from the returned `trailers` since they have dedicated fields.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let message_lines: Vec<&str> = s.lines().collect();
        // Paragraphs are separated by blank lines.
        let mut split: Vec<Vec<&str>> = message_lines
            .split(|line| line.is_empty())
            .map(|s| s.to_vec())
            .collect();
        let title = split.remove(0).join("\n");
        // The last paragraph holds the trailers; lines that fail to parse are skipped.
        let mut trailers: Vec<Trailer> = split
            .pop()
            .ok_or_else(|| anyhow!("No trailers found on snapshot commit message"))?
            .iter()
            .map(|s| Trailer::from_str(s))
            .filter_map(Result::ok)
            .collect();
        // Everything between title and trailers is the body.
        let body = split.iter().map(|v| v.join("\n")).join("\n\n");
        let body = if body.is_empty() { None } else { Some(body) };
        let version = Version::from_str(
            &trailers
                .iter()
                .find(|t| t.key == "Version")
                .cloned()
                .ok_or_else(|| anyhow!("No version found on snapshot commit message"))?
                .value,
        )?;
        let operation = OperationType::from_str(
            &trailers
                .iter()
                .find(|t| t.key == "Operation")
                .cloned()
                .ok_or_else(|| anyhow!("No operation found on snapshot commit message"))?
                .value,
        )
        .unwrap_or_default();
        // Version and operation are represented by dedicated fields above.
        trailers.retain(|t| t.key != "Version" && t.key != "Operation");
        Ok(SnapshotDetails {
            version,
            operation,
            title,
            body,
            trailers,
        })
    }
}
impl Display for SnapshotDetails {
/// Serializes the details back into commit-message form: title, blank
/// line, optional body plus blank line, then the `Version` and
/// `Operation` trailers followed by any custom trailers.
///
/// Must remain the inverse of `FromStr` — the round-trip is asserted in
/// `test_new_snapshot`.
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
writeln!(f, "{}", self.title)?;
writeln!(f)?;
if let Some(body) = &self.body {
writeln!(f, "{}", body)?;
writeln!(f)?;
}
writeln!(f, "Version: {}", self.version)?;
writeln!(f, "Operation: {}", self.operation)?;
for line in &self.trailers {
writeln!(f, "{}", line)?;
}
Ok(())
}
}
/// The kind of operation a snapshot was taken just before.
/// Persisted as the `Operation` commit-message trailer and parsed via
/// strum's `EnumString` derive (exact variant-name match).
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, EnumString, Default)]
pub enum OperationType {
CreateCommit,
CreateBranch,
SetBaseBranch,
MergeUpstream,
UpdateWorkspaceBase,
MoveHunk,
UpdateBranchName,
UpdateBranchNotes,
ReorderBranches,
SelectDefaultVirtualBranch,
UpdateBranchRemoteName,
GenericBranchUpdate,
DeleteBranch,
ApplyBranch,
DiscardHunk,
DiscardFile,
AmendCommit,
UndoCommit,
UnapplyBranch,
CherryPick,
SquashCommit,
UpdateCommitMessage,
MoveCommit,
RestoreFromSnapshot,
ReorderCommit,
InsertBlankCommit,
MoveCommitFile,
/// Fallback used when the operation name is missing or unrecognized.
#[default]
Unknown,
}
impl fmt::Display for OperationType {
    /// Renders the variant name; intentionally identical to the `Debug`
    /// representation so the name round-trips through `EnumString`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(self, f)
    }
}
/// Version of the snapshot payload format, persisted as the `Version` trailer.
#[derive(Debug, PartialEq, Clone, Serialize)]
pub struct Version(u32);

impl Default for Version {
    /// The current snapshot format version.
    fn default() -> Self {
        Self(1)
    }
}

impl fmt::Display for Version {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&self.0, f)
    }
}

impl FromStr for Version {
    type Err = std::num::ParseIntError;

    /// Parses the decimal version number from a trailer value.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        s.parse().map(Self)
    }
}
/// Represents a key value pair stored in a snapshot.
/// Using the git trailer format (https://git-scm.com/docs/git-interpret-trailers)
#[derive(Debug, PartialEq, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Trailer {
    /// Trailer key
    pub key: String,
    /// Trailer value
    pub value: String,
}

impl Display for Trailer {
    /// Emits the canonical `key: value` trailer line.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{}: {}", self.key, self.value)
    }
}

impl FromStr for Trailer {
    type Err = anyhow::Error;

    /// Parses one `key: value` line, trimming whitespace around both parts.
    /// Lines without a `:` separator are rejected.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let (key, value) = s
            .split_once(':')
            .ok_or_else(|| anyhow!("Invalid trailer format"))?;
        Ok(Self {
            key: key.trim().to_string(),
            value: value.trim().to_string(),
        })
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_trailer_display() {
        let trailer = Trailer {
            key: "foo".to_string(),
            value: "bar".to_string(),
        };
        assert_eq!(format!("{}", trailer), "foo: bar");
    }

    #[test]
    fn test_trailer_from_str() {
        let s = "foo: bar";
        let trailer = Trailer::from_str(s).unwrap();
        assert_eq!(trailer.key, "foo");
        assert_eq!(trailer.value, "bar");
    }

    #[test]
    fn test_trailer_from_str_invalid() {
        let s = "foobar";
        let result = Trailer::from_str(s);
        assert!(result.is_err());
    }

    #[test]
    fn test_version_from_trailer() {
        let s = "Version: 1";
        let trailer = Trailer::from_str(s).unwrap();
        let version = Version::from_str(&trailer.value).unwrap();
        assert_eq!(version.0, 1);
    }

    #[test]
    fn test_version_invalid() {
        let s = "Version: -1";
        let trailer = Trailer::from_str(s).unwrap();
        let version = Version::from_str(&trailer.value);
        assert!(version.is_err());
    }

    #[test]
    fn test_operation_type_from_trailer() {
        let s = "Operation: CreateCommit";
        let trailer = Trailer::from_str(s).unwrap();
        let operation = OperationType::from_str(&trailer.value).unwrap();
        assert_eq!(operation, OperationType::CreateCommit);
    }

    #[test]
    fn test_operation_unknown() {
        let commit_message = "Create a new snapshot\n\nBody text 1\nBody text2\n\nBody text 3\n\nVersion: 1\nOperation: Asdf\nFoo: Bar\n";
        let details = SnapshotDetails::from_str(commit_message).unwrap();
        assert_eq!(details.version.0, 1);
        // An unrecognized operation name falls back to `OperationType::Unknown`.
        assert_eq!(details.operation, OperationType::Unknown);
        assert_eq!(details.title, "Create a new snapshot");
        assert_eq!(
            details.body,
            Some("Body text 1\nBody text2\n\nBody text 3".to_string())
        );
        assert_eq!(
            details.trailers,
            vec![Trailer {
                key: "Foo".to_string(),
                value: "Bar".to_string(),
            }]
        );
    }

    #[test]
    fn test_new_snapshot() {
        let commit_sha = "1234567890".to_string();
        let commit_message =
            "Create a new snapshot\n\nBody text 1\nBody text2\n\nBody text 3\n\nVersion: 1\nOperation: CreateCommit\nFoo: Bar\n".to_string();
        let created_at = 1234567890;
        // `from_str` only borrows the message — no clone needed here.
        let details = SnapshotDetails::from_str(&commit_message).unwrap();
        let snapshot = Snapshot {
            id: commit_sha.clone(),
            created_at,
            details: Some(details),
        };
        assert_eq!(snapshot.id, commit_sha);
        assert_eq!(snapshot.created_at, created_at);
        let details = snapshot.details.unwrap();
        assert_eq!(details.version.0, 1);
        assert_eq!(details.operation, OperationType::CreateCommit);
        assert_eq!(details.title, "Create a new snapshot");
        assert_eq!(
            details.body,
            Some("Body text 1\nBody text2\n\nBody text 3".to_string())
        );
        assert_eq!(
            details.trailers,
            vec![Trailer {
                key: "Foo".to_string(),
                value: "Bar".to_string(),
            }]
        );
        // Round-trip: Display must reproduce the original commit message.
        assert_eq!(details.to_string(), commit_message);
    }
}

View File

@ -0,0 +1,4 @@
// Snapshot ("oplog") support: capture and restore project state.

/// Snapshot metadata: commit-message-encoded details and trailers.
pub mod entry;
/// Keeps snapshot commits reachable via a synthetic reflog entry.
mod reflog;
/// Create, list and restore snapshots.
pub mod snapshot;
/// Persists the oplog head sha in `oplog.toml`.
mod state;

View File

@ -0,0 +1,179 @@
use crate::fs::write;
use anyhow::Result;
use itertools::Itertools;
use std::path::PathBuf;
use crate::projects::Project;
/// Sets a reference to the oplog head commit such that snapshots are reachable and will not be garbage collected.
/// We want to achieve 2 things:
/// - The oplog must not be visible in `git log --all` as branch
/// - The oplog tree must not be garbage collected (i.e. it must be reachable)
///
/// This needs to be invoked whenever the target head or the oplog head change.
///
/// How it works:
/// First a reference gitbutler/target is created, pointing to the head of the target (trunk) branch. This is a fake branch that we don't need to care about. If it doesn't exist, it is created.
/// Then in the reflog entry logs/refs/heads/gitbutler/target we pretend that the ref originally pointed to the oplog head commit like so:
///
/// 0000000000000000000000000000000000000000 <target branch head sha>
/// <target branch head sha> <oplog head sha>
///
/// The reflog entry is continuously updated to refer to the current target and oplog head commits.
pub fn set_reference_to_oplog(
project: &Project,
target_head_sha: &str,
oplog_head_sha: &str,
) -> Result<()> {
let repo_path = project.path.as_path();
let reflog_file_path = repo_path
.join(".git")
.join("logs")
.join("refs")
.join("heads")
.join("gitbutler")
.join("target");
// Creating the branch writes its reflog file as a side effect.
if !reflog_file_path.exists() {
let repo = git2::Repository::init(repo_path)?;
let commit = repo.find_commit(git2::Oid::from_str(target_head_sha)?)?;
repo.branch("gitbutler/target", &commit, false)?;
}
// Re-check: branch creation above must have produced the reflog file
// that the two helpers below rewrite in place.
if !reflog_file_path.exists() {
return Err(anyhow::anyhow!(
"Could not create gitbutler/target which is needed for undo snapshotting"
));
}
// Line 1: make the branch appear created from the target head.
set_target_ref(&reflog_file_path, target_head_sha)?;
// Line 2: pretend the ref was then reset to the oplog head.
set_oplog_ref(&reflog_file_path, oplog_head_sha)?;
Ok(())
}
/// Rewrites the first reflog line so that both the "new sha" field (index 1)
/// and the trailing "Created from <sha>" token point at `sha`.
///
/// NOTE(review): indexes `lines[0]` and `first_line[1]` directly — this
/// panics if the reflog file is empty or its first line has fewer than two
/// whitespace-separated fields. Callers currently guarantee the file was
/// just written by git (see `set_reference_to_oplog`); confirm before reuse.
fn set_target_ref(file_path: &PathBuf, sha: &str) -> Result<()> {
// Example first line:
// 0000000000000000000000000000000000000000 82873b54925ab268e9949557f28d070d388e7774 Kiril Videlov <kiril@videlov.com> 1714037434 +0200 branch: Created from 82873b54925ab268e9949557f28d070d388e7774
let content = std::fs::read_to_string(file_path)?;
let mut lines = content.lines().collect::<Vec<_>>();
let mut first_line = lines[0].split_whitespace().collect_vec();
let len = first_line.len();
// Replace the "new sha" field and the final "Created from" sha.
first_line[1] = sha;
first_line[len - 1] = sha;
let binding = first_line.join(" ");
lines[0] = &binding;
let content = format!("{}\n", lines.join("\n"));
write(file_path, content)
}
/// Derives the second reflog line from the first, recording a fake
/// `reset: moving to <sha>` entry from the target head to the oplog head.
/// Any pre-existing lines after the first are discarded.
///
/// NOTE(review): `replace("branch", " reset")` substitutes every occurrence
/// of "branch" in the copied metadata (not just the action word) and inserts
/// a leading space; likewise the split on ':' assumes the action text is the
/// second colon-separated segment. Verify against real reflog content.
fn set_oplog_ref(file_path: &PathBuf, sha: &str) -> Result<()> {
// Example second line:
// 82873b54925ab268e9949557f28d070d388e7774 7e8eab472636a26611214bebea7d6b79c971fb8b Kiril Videlov <kiril@videlov.com> 1714044124 +0200 reset: moving to 7e8eab472636a26611214bebea7d6b79c971fb8b
let content = std::fs::read_to_string(file_path)?;
let first_line = content.lines().collect::<Vec<_>>().remove(0);
let target_ref = first_line.split_whitespace().collect_vec()[1];
let the_rest = first_line.split_whitespace().collect_vec()[2..].join(" ");
let the_rest = the_rest.replace("branch", " reset");
let mut the_rest_split = the_rest.split(':').collect_vec();
let new_msg = format!(" moving to {}", sha);
the_rest_split[1] = &new_msg;
let the_rest = the_rest_split.join(":");
let second_line = [target_ref, sha, &the_rest].join(" ");
let content = format!("{}\n", [first_line, &second_line].join("\n"));
write(file_path, content)
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    #[test]
    fn test_set_target_ref() {
        let (dir, commit_id) = setup_repo();
        let project = Project {
            path: dir.path().to_path_buf(),
            ..Default::default()
        };

        let log_file_path = dir
            .path()
            .join(".git")
            .join("logs")
            .join("refs")
            .join("heads")
            .join("gitbutler")
            .join("target");
        assert!(!log_file_path.exists());

        // Set ref for the first time
        assert!(set_reference_to_oplog(&project, &commit_id.to_string(), "oplog_sha").is_ok());
        assert!(log_file_path.exists());
        let log_file = std::fs::read_to_string(&log_file_path).unwrap();
        let log_lines = log_file.lines().collect::<Vec<_>>();
        assert_eq!(log_lines.len(), 2);
        assert!(log_lines[0].starts_with(&format!(
            "0000000000000000000000000000000000000000 {}",
            commit_id
        )));
        assert!(log_lines[0].ends_with(&format!("branch: Created from {}", commit_id)));
        assert!(log_lines[1].starts_with(&format!("{} {}", commit_id, "oplog_sha")));
        assert!(log_lines[1].ends_with("reset: moving to oplog_sha"));

        // Update the oplog head only
        assert!(
            set_reference_to_oplog(&project, &commit_id.to_string(), "another_oplog_sha").is_ok()
        );
        let log_file = std::fs::read_to_string(&log_file_path).unwrap();
        let log_lines = log_file.lines().collect::<Vec<_>>();
        assert_eq!(log_lines.len(), 2);
        assert!(log_lines[0].starts_with(&format!(
            "0000000000000000000000000000000000000000 {}",
            commit_id
        )));
        assert!(log_lines[0].ends_with(&format!("branch: Created from {}", commit_id)));
        assert!(log_lines[1].starts_with(&format!("{} {}", commit_id, "another_oplog_sha")));
        assert!(log_lines[1].ends_with("reset: moving to another_oplog_sha"));

        // Update the target head only
        assert!(set_reference_to_oplog(&project, "new_target", "another_oplog_sha").is_ok());
        let log_file = std::fs::read_to_string(&log_file_path).unwrap();
        let log_lines = log_file.lines().collect::<Vec<_>>();
        assert_eq!(log_lines.len(), 2);
        assert!(log_lines[0].starts_with(&format!(
            "0000000000000000000000000000000000000000 {}",
            "new_target"
        )));
        assert!(log_lines[0].ends_with(&format!("branch: Created from {}", "new_target")));
        assert!(log_lines[1].starts_with(&format!("{} {}", "new_target", "another_oplog_sha")));
        assert!(log_lines[1].ends_with("reset: moving to another_oplog_sha"));
    }

    /// Initializes a repository with a single committed file and returns its
    /// temp directory together with the initial commit id.
    fn setup_repo() -> (tempfile::TempDir, git2::Oid) {
        let dir = tempdir().unwrap();
        let repo = git2::Repository::init(dir.path()).unwrap();
        let file_path = dir.path().join("foo.txt");
        std::fs::write(file_path, "test").unwrap();
        let mut index = repo.index().unwrap();
        index.add_path(&PathBuf::from("foo.txt")).unwrap();
        let oid = index.write_tree().unwrap();
        let signature = git2::Signature::now("Your Name", "your.email@example.com").unwrap();
        let commit_id = repo
            .commit(
                Some("HEAD"),
                &signature,
                &signature,
                "initial commit",
                &repo.find_tree(oid).unwrap(),
                &[],
            )
            .unwrap();
        (dir, commit_id)
    }
}

View File

@ -0,0 +1,350 @@
use anyhow::anyhow;
use itertools::Itertools;
use std::fs;
use std::str::FromStr;
use anyhow::Result;
use crate::{projects::Project, virtual_branches::VirtualBranchesHandle};
use super::{
entry::{OperationType, Snapshot, SnapshotDetails, Trailer},
reflog::set_reference_to_oplog,
state::OplogHandle,
};
/// Files larger than this limit (32 MiB) are excluded from snapshots via an
/// in-memory ignore rule (see `get_exclude_list`).
const SNAPSHOT_FILE_LIMIT_BYTES: u64 = 32 * 1024 * 1024;
/// Creates a snapshot of the current state of the repository and virtual branches using the given label.
///
/// If this is the first snapshot created, supporting structures are initialized:
/// - The current oplog head is persisted in `.git/gitbutler/oplog.toml`.
/// - A fake branch `gitbutler/target` is created and maintained in order to keep the oplog head reachable.
///
/// The state of virtual branches `.git/gitbutler/virtual_branches.toml` is copied to the project root so that it is snapshotted.
pub fn create(project: &Project, details: SnapshotDetails) -> Result<()> {
    // Snapshotting is opt-in: an unset flag and `Some(false)` both mean disabled.
    if !project.enable_snapshots.unwrap_or(false) {
        return Ok(());
    }
    let repo_path = project.path.as_path();
    let repo = git2::Repository::init(repo_path)?;

    let vb_state = VirtualBranchesHandle::new(&project.gb_dir());
    let default_target_sha = vb_state.get_default_target()?.sha;

    // Parent of the new snapshot commit: the current oplog head if it still
    // resolves, otherwise (or on the very first snapshot) the default target.
    let oplog_state = OplogHandle::new(&project.gb_dir());
    let oplog_head_commit = match oplog_state.get_oplog_head()? {
        Some(head_sha) => match repo.find_commit(git2::Oid::from_str(&head_sha)?) {
            Ok(commit) => commit,
            Err(_) => repo.find_commit(default_target_sha.into())?,
        },
        // This is the first snapshot - use the default target as starting point
        None => repo.find_commit(default_target_sha.into())?,
    };

    // Copy virtual_branches.toml to the project root so that it is included in the snapshot tree.
    std::fs::copy(
        repo_path.join(".git/gitbutler/virtual_branches.toml"),
        repo_path.join("virtual_branches.toml"),
    )?;

    // Exclude files that are larger than the limit (eg. database.sql which may never be intended to be committed)
    let files_to_exclude = get_exclude_list(&repo)?;
    // In-memory, libgit2 internal ignore rule
    repo.add_ignore_rule(&files_to_exclude)?;

    // Add everything in the workdir to the index
    let mut index = repo.index()?;
    index.add_all(["*"].iter(), git2::IndexAddOption::DEFAULT, None)?;
    index.write()?;

    // Create a tree out of the index
    let tree_id = index.write_tree()?;
    let tree = repo.find_tree(tree_id)?;

    // Construct a new commit; it is not attached to any ref here —
    // reachability is ensured via the synthetic reflog below.
    let signature = git2::Signature::now("GitButler", "gitbutler@gitbutler.com")?;
    let new_commit_oid = repo.commit(
        None,
        &signature,
        &signature,
        &details.to_string(),
        &tree,
        &[&oplog_head_commit],
    )?;

    // Remove the temporary copy of virtual_branches.toml again.
    std::fs::remove_file(project.path.join("virtual_branches.toml"))?;

    // Reset the workdir/index back to the integration branch state
    // (the index was mutated by `add_all` above).
    let integration_branch = repo
        .find_branch("gitbutler/integration", git2::BranchType::Local)?
        .get()
        .peel_to_commit()?;
    repo.reset(
        &integration_branch.into_object(),
        git2::ResetType::Mixed,
        None,
    )?;

    oplog_state.set_oplog_head(new_commit_oid.to_string())?;
    set_reference_to_oplog(
        project,
        &default_target_sha.to_string(),
        &new_commit_oid.to_string(),
    )?;

    Ok(())
}
/// Lists the snapshots that have been created for the given repository, up to the given limit.
/// An alternative way of retrieving the snapshots would be to manually run `git log <oplog_head>`,
/// with the head sha available in `.git/gitbutler/oplog.toml`.
///
/// If there are no snapshots, an empty list is returned.
pub fn list(project: &Project, limit: usize) -> Result<Vec<Snapshot>> {
    let repo_path = project.path.as_path();
    let repo = git2::Repository::init(repo_path)?;

    let oplog_state = OplogHandle::new(&project.gb_dir());
    // No recorded oplog head means no snapshot was ever created.
    let Some(head_sha) = oplog_state.get_oplog_head()? else {
        return Ok(vec![]);
    };

    let oplog_head_commit = repo.find_commit(git2::Oid::from_str(&head_sha)?)?;

    let mut revwalk = repo.revwalk()?;
    revwalk.push(oplog_head_commit.id())?;

    let mut snapshots = Vec::new();
    for commit_id in revwalk {
        let commit_id = commit_id?;
        let commit = repo.find_commit(commit_id)?;
        // A merge commit marks the end of the snapshot lineage.
        if commit.parent_count() > 1 {
            break;
        }
        // Messages that don't parse as snapshot details are kept with `details: None`.
        let details = commit
            .message()
            .and_then(|msg| SnapshotDetails::from_str(msg).ok());
        snapshots.push(Snapshot {
            id: commit_id.to_string(),
            details,
            created_at: commit.time().seconds() * 1000,
        });
        if snapshots.len() >= limit {
            break;
        }
    }
    Ok(snapshots)
}
/// Reverts to a previous state of the working directory, virtual branches and commits.
/// The provided sha must refer to a valid snapshot commit.
/// Upon success, a new snapshot is created.
///
/// The state of virtual branches `.git/gitbutler/virtual_branches.toml` is restored from the snapshot.
pub fn restore(project: &Project, sha: String) -> Result<()> {
    let repo_path = project.path.as_path();
    let repo = git2::Repository::init(repo_path)?;

    let commit = repo.find_commit(git2::Oid::from_str(&sha)?)?;
    let tree = commit.tree()?;

    // Exclude files that are larger than the limit (eg. database.sql which may never be intended to be committed)
    let files_to_exclude = get_exclude_list(&repo)?;
    // In-memory, libgit2 internal ignore rule
    repo.add_ignore_rule(&files_to_exclude)?;

    // Forcefully check out the snapshot tree, removing untracked files that
    // are not part of it.
    let mut checkout_builder = git2::build::CheckoutBuilder::new();
    checkout_builder.remove_untracked(true);
    checkout_builder.force();
    repo.checkout_tree(tree.as_object(), Some(&mut checkout_builder))?;

    // Move the snapshotted virtual_branches.toml from the project root back
    // into .git/gitbutler (the inverse of what `create` did).
    std::fs::rename(
        repo_path.join("virtual_branches.toml"),
        repo_path.join(".git/gitbutler/virtual_branches.toml"),
    )?;

    // Record the restore itself as a new snapshot so it can be undone too.
    let details = SnapshotDetails {
        version: Default::default(),
        operation: OperationType::RestoreFromSnapshot,
        title: "Restored from snapshot".to_string(),
        body: None,
        trailers: vec![Trailer {
            key: "restored_from".to_string(),
            value: sha,
        }],
    };
    create(project, details)?;
    Ok(())
}
/// Returns a space-separated list of workdir-relative paths whose on-disk
/// size exceeds `SNAPSHOT_FILE_LIMIT_BYTES`, suitable for
/// `Repository::add_ignore_rule`.
///
/// # Errors
/// Fails if the repository has no parent workdir path or the status scan fails.
fn get_exclude_list(repo: &git2::Repository) -> Result<String> {
    let repo_path = repo
        .path()
        .parent()
        .ok_or_else(|| anyhow!("failed to get repo path"))?;
    let statuses = repo.statuses(None)?;
    let mut files_to_exclude = vec![];
    for entry in statuses.iter() {
        if let Some(path) = entry.path() {
            let path = repo_path.join(path);
            // Files we cannot stat are simply not excluded.
            if let Ok(metadata) = fs::metadata(&path) {
                if metadata.is_file() && metadata.len() > SNAPSHOT_FILE_LIMIT_BYTES {
                    files_to_exclude.push(path);
                }
            }
        }
    }
    // Exclude files that are larger than the limit (eg. database.sql which may never be intended to be committed)
    let files_to_exclude = files_to_exclude
        .iter()
        .filter_map(|f| f.strip_prefix(repo_path).ok())
        .filter_map(|f| f.to_str())
        .join(" ");
    Ok(files_to_exclude)
}
#[cfg(test)]
mod tests {
use std::{io::Write, path::PathBuf};
use crate::virtual_branches::Branch;
use super::*;
use tempfile::tempdir;
#[test]
fn test_create_and_restore() {
let dir = tempdir().unwrap();
let repo = git2::Repository::init(dir.path()).unwrap();
let file_path = dir.path().join("1.txt");
std::fs::write(file_path, "test").unwrap();
let file_path = dir.path().join("2.txt");
std::fs::write(file_path, "test").unwrap();
let mut index = repo.index().unwrap();
index.add_path(&PathBuf::from("1.txt")).unwrap();
index.add_path(&PathBuf::from("2.txt")).unwrap();
let oid = index.write_tree().unwrap();
let name = "Your Name";
let email = "your.email@example.com";
let signature = git2::Signature::now(name, email).unwrap();
let initial_commit = repo
.commit(
Some("HEAD"),
&signature,
&signature,
"initial commit",
&repo.find_tree(oid).unwrap(),
&[],
)
.unwrap();
// create a new branch called "gitbutler/integraion" from initial commit
repo.branch(
"gitbutler/integration",
&repo.find_commit(initial_commit).unwrap(),
false,
)
.unwrap();
let project = Project {
path: dir.path().to_path_buf(),
enable_snapshots: Some(true),
..Default::default()
};
// create gb_dir folder
std::fs::create_dir_all(project.gb_dir()).unwrap();
let vb_state = VirtualBranchesHandle::new(&project.gb_dir());
let target_sha = initial_commit.to_string();
let default_target = crate::virtual_branches::target::Target {
branch: crate::git::RemoteRefname::new("origin", "main"),
remote_url: Default::default(),
sha: crate::git::Oid::from_str(&target_sha).unwrap(),
};
vb_state.set_default_target(default_target.clone()).unwrap();
let file_path = dir.path().join("uncommitted.txt");
std::fs::write(file_path, "test").unwrap();
let file_path = dir.path().join("large.txt");
// write 33MB of random data in the file
let mut file = std::fs::File::create(file_path).unwrap();
for _ in 0..33 * 1024 {
let data = [0u8; 1024];
file.write_all(&data).unwrap();
}
// create a snapshot
create(&project, SnapshotDetails::new(OperationType::CreateCommit)).unwrap();
let snapshots = list(&project, 100).unwrap();
// The large file is still here but it will not be part of the snapshot
let file_path = dir.path().join("large.txt");
assert!(file_path.exists());
// Modify file 1, remove file 2, create file 3
let file_path = dir.path().join("1.txt");
std::fs::write(file_path, "TEST").unwrap();
let file_path = dir.path().join("2.txt");
std::fs::remove_file(file_path).unwrap();
let file_path = dir.path().join("3.txt");
std::fs::write(file_path, "something_new").unwrap();
let file_path = dir.path().join("uncommitted.txt");
std::fs::write(file_path, "TEST").unwrap();
// Create a fake branch in virtual_branches.toml
let id = crate::id::Id::from_str("9acb2a3b-cddf-47d7-b531-a7798978c237").unwrap();
vb_state
.set_branch(Branch {
id,
..Default::default()
})
.unwrap();
assert!(vb_state.get_branch(&id).is_ok());
// restore from the snapshot
restore(&project, snapshots.first().unwrap().id.clone()).unwrap();
let file_path = dir.path().join("1.txt");
let file_lines = std::fs::read_to_string(file_path).unwrap();
assert_eq!(file_lines, "test");
let file_path = dir.path().join("2.txt");
assert!(file_path.exists());
let file_lines = std::fs::read_to_string(file_path).unwrap();
assert_eq!(file_lines, "test");
let file_path = dir.path().join("3.txt");
assert!(!file_path.exists());
let file_path = dir.path().join("uncommitted.txt");
let file_lines = std::fs::read_to_string(file_path).unwrap();
assert_eq!(file_lines, "test");
// The large file is still here but it was not part of the snapshot
let file_path = dir.path().join("large.txt");
assert!(file_path.exists());
// The fake branch is gone
assert!(vb_state.get_branch(&id).is_err());
}
}

View File

@ -0,0 +1,73 @@
use anyhow::Result;
use std::{
fs::File,
io::Read,
path::{Path, PathBuf},
};
use serde::{Deserialize, Serialize};
/// This tracks the head of the oplog, persisted in `oplog.toml`.
#[derive(Serialize, Deserialize, Debug, Default)]
pub struct Oplog {
    /// The sha of the last oplog commit, or `None` if no oplog commit
    /// has been recorded yet.
    pub head_sha: Option<String>,
}
/// A handle to the persisted oplog head state, stored in `oplog.toml`
/// under the base path given at construction time.
pub struct OplogHandle {
    /// The path to the file containing the oplog head state.
    file_path: PathBuf,
}
impl OplogHandle {
    /// Creates a new concurrency-safe handle to the state of the oplog.
    ///
    /// The state lives in `oplog.toml` inside `base_path`.
    pub fn new(base_path: &Path) -> Self {
        let file_path = base_path.join("oplog.toml");
        Self { file_path }
    }

    /// Persists the oplog head for the given repository.
    ///
    /// Errors if the file cannot be read or written.
    pub fn set_oplog_head(&self, sha: String) -> Result<()> {
        let mut oplog = self.read_file()?;
        oplog.head_sha = Some(sha);
        // Propagate the write result directly instead of `?; Ok(())`.
        self.write_file(&oplog)
    }

    /// Gets the oplog head sha for the given repository.
    ///
    /// Errors if the file cannot be read.
    pub fn get_oplog_head(&self) -> Result<Option<String>> {
        let oplog = self.read_file()?;
        Ok(oplog.head_sha)
    }

    /// Reads and parses the state file.
    ///
    /// If the file does not exist, the default (empty) state is returned;
    /// nothing is created on disk until the first write.
    fn read_file(&self) -> Result<Oplog> {
        if !self.file_path.exists() {
            return Ok(Oplog::default());
        }
        let mut file: File = File::open(self.file_path.as_path())?;
        let mut contents = String::new();
        file.read_to_string(&mut contents)?;
        // Surface TOML parse failures with the offending path attached.
        let oplog: Oplog =
            toml::from_str(&contents).map_err(|e| crate::reader::Error::ParseError {
                path: self.file_path.clone(),
                source: e,
            })?;
        Ok(oplog)
    }

    /// Serializes `oplog` and writes it to the state file.
    fn write_file(&self, oplog: &Oplog) -> Result<()> {
        write(self.file_path.as_path(), oplog)
    }
}
/// Serializes `oplog` as TOML and writes it to `file_path`.
///
/// Errors if serialization fails or the file cannot be written.
fn write<P: AsRef<Path>>(file_path: P, oplog: &Oplog) -> anyhow::Result<()> {
    // `oplog` is already a reference; avoid the needless `&oplog` double borrow.
    let contents = toml::to_string(oplog)?;
    crate::fs::write(file_path, contents)
}

View File

@ -1,71 +1,70 @@
#[cfg(target_family = "unix")]
use std::os::unix::prelude::*;
use std::{
fs,
path::{Path, PathBuf},
sync::{Arc, RwLock},
};
#[derive(Debug, Default, Clone)]
/// A facility to read, write and delete files.
#[derive(Debug, Clone)]
pub struct Storage {
local_data_dir: Arc<RwLock<PathBuf>>,
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error(transparent)]
IO(#[from] std::io::Error),
/// The directory into which all of our files will be written or read from.
local_data_dir: PathBuf,
}
impl Storage {
pub fn new<P: AsRef<Path>>(local_data_dir: P) -> Storage {
pub fn new(local_data_dir: impl Into<PathBuf>) -> Storage {
Storage {
local_data_dir: Arc::new(RwLock::new(local_data_dir.as_ref().to_path_buf())),
local_data_dir: local_data_dir.into(),
}
}
pub fn read<P: AsRef<Path>>(&self, path: P) -> Result<Option<String>, Error> {
let local_data_dir = self.local_data_dir.read().unwrap();
let file_path = local_data_dir.join(path);
if !file_path.exists() {
return Ok(None);
/// Read the content of the file at `rela_path` which is a path relative to our root directory.
/// Return `Ok(None)` if the file doesn't exist.
// TODO(ST): make all these operations write bytes.
pub fn read(&self, rela_path: impl AsRef<Path>) -> std::io::Result<Option<String>> {
match fs::read_to_string(self.local_data_dir.join(rela_path)) {
Ok(content) => Ok(Some(content)),
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
Err(err) => Err(err),
}
let contents = fs::read_to_string(&file_path).map_err(Error::IO)?;
Ok(Some(contents))
}
pub fn write<P: AsRef<Path>>(&self, path: P, content: &str) -> Result<(), Error> {
let local_data_dir = self.local_data_dir.write().unwrap();
let file_path = local_data_dir.join(path);
let dir = file_path.parent().unwrap();
if !dir.exists() {
fs::create_dir_all(dir).map_err(Error::IO)?;
}
fs::write(&file_path, content).map_err(Error::IO)?;
// Set the permissions to be user-only. We can't actually
// do this on Windows, so we ignore that platform.
#[cfg(target_family = "unix")]
{
let metadata = fs::metadata(file_path.clone())?;
let mut permissions = metadata.permissions();
permissions.set_mode(0o600); // User read/write
fs::set_permissions(file_path.clone(), permissions)?;
}
Ok(())
/// Write `content` to `rela_path` atomically, so it's either written completely, or not at all.
/// Creates the file and intermediate directories.
///
/// ### On Synchronization
///
/// Mutating operations are assumed to be synchronized by the caller,
/// even though all writes will be atomic.
///
/// If these operations are not synchronized, they will be racy as it's undefined
/// which *whole* write will win. Thus, operations which touch multiple files and
/// need them to be consistent *need* to synchronize by some mean.
///
/// Generally, the filesystem is used for synchronization, not in-memory primitives.
pub fn write(&self, rela_path: impl AsRef<Path>, content: &str) -> std::io::Result<()> {
let file_path = self.local_data_dir.join(rela_path);
crate::fs::create_dirs_then_write(file_path, content)
}
pub fn delete<P: AsRef<Path>>(&self, path: P) -> Result<(), Error> {
let local_data_dir = self.local_data_dir.write().unwrap();
let file_path = local_data_dir.join(path);
if !file_path.exists() {
return Ok(());
}
if file_path.is_dir() {
fs::remove_dir_all(file_path.clone()).map_err(Error::IO)?;
/// Delete the file or directory at `rela_path`.
///
/// ### Panics
///
/// If a symlink is encountered.
pub fn delete(&self, rela_path: impl AsRef<Path>) -> std::io::Result<()> {
let file_path = self.local_data_dir.join(rela_path);
let md = match file_path.symlink_metadata() {
Ok(md) => md,
Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(()),
Err(err) => return Err(err),
};
if md.is_dir() {
fs::remove_dir_all(file_path)?;
} else if md.is_file() {
fs::remove_file(file_path)?;
} else {
fs::remove_file(file_path.clone()).map_err(Error::IO)?;
unreachable!("BUG: we do not create or work with symlinks")
}
Ok(())
}

View File

@ -1,7 +1,9 @@
use anyhow::Context;
use std::path::PathBuf;
use super::{storage::Storage, User};
/// TODO(ST): useless intermediary - remove
#[derive(Clone)]
pub struct Controller {
storage: Storage,
@ -12,7 +14,7 @@ impl Controller {
Controller { storage }
}
pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Controller {
pub fn from_path(path: impl Into<PathBuf>) -> Controller {
Controller::new(Storage::from_path(path))
}

View File

@ -1,4 +1,5 @@
use anyhow::Result;
use std::path::PathBuf;
use crate::{storage, users::user};
@ -6,28 +7,28 @@ const USER_FILE: &str = "user.json";
#[derive(Debug, Clone)]
pub struct Storage {
storage: storage::Storage,
inner: storage::Storage,
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error(transparent)]
Storage(#[from] storage::Error),
Storage(#[from] std::io::Error),
#[error(transparent)]
Json(#[from] serde_json::Error),
}
impl Storage {
pub fn new(storage: storage::Storage) -> Storage {
Storage { storage }
Storage { inner: storage }
}
pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Storage {
pub fn from_path(path: impl Into<PathBuf>) -> Storage {
Storage::new(storage::Storage::new(path))
}
pub fn get(&self) -> Result<Option<user::User>, Error> {
match self.storage.read(USER_FILE)? {
match self.inner.read(USER_FILE)? {
Some(data) => Ok(Some(serde_json::from_str(&data)?)),
None => Ok(None),
}
@ -35,12 +36,12 @@ impl Storage {
pub fn set(&self, user: &user::User) -> Result<(), Error> {
let data = serde_json::to_string(user)?;
self.storage.write(USER_FILE, &data)?;
self.inner.write(USER_FILE, &data)?;
Ok(())
}
pub fn delete(&self) -> Result<(), Error> {
self.storage.delete(USER_FILE)?;
self.inner.delete(USER_FILE)?;
Ok(())
}
}

View File

@ -6,7 +6,7 @@ use serde::Serialize;
use super::{
branch, errors,
integration::{update_gitbutler_integration, GITBUTLER_INTEGRATION_REFERENCE},
target, BranchId, RemoteCommit, VirtualBranchesHandle,
target, BranchId, RemoteCommit, VirtualBranchHunk, VirtualBranchesHandle,
};
use crate::{
git::{self, diff},
@ -193,20 +193,21 @@ pub fn set_base_branch(
let wd_diff = diff::workdir(repo, &current_head_commit.id())?;
if !wd_diff.is_empty() || current_head_commit.id() != target.sha {
let hunks_by_filepath = super::virtual_hunks_by_filepath_from_file_diffs(
&project_repository.project().path,
&wd_diff,
);
// assign ownership to the branch
let ownership = hunks_by_filepath.values().flatten().fold(
let ownership = wd_diff.iter().fold(
BranchOwnershipClaims::default(),
|mut ownership, hunk| {
ownership.put(
&format!("{}:{}", hunk.file_path.display(), hunk.id)
|mut ownership, (file_path, diff)| {
for hunk in &diff.hunks {
ownership.put(
format!(
"{}:{}",
file_path.display(),
VirtualBranchHunk::gen_id(hunk.new_start, hunk.new_lines)
)
.parse()
.unwrap(),
);
);
}
ownership
},
);
@ -254,7 +255,7 @@ pub fn set_base_branch(
tree: super::write_tree_onto_commit(
project_repository,
current_head_commit.id(),
&diff::diff_files_into_hunks(wd_diff),
diff::diff_files_into_hunks(wd_diff),
)?,
ownership,
order: 0,
@ -267,7 +268,7 @@ pub fn set_base_branch(
set_exclude_decoration(project_repository)?;
super::integration::update_gitbutler_integration(&vb_state, project_repository)?;
update_gitbutler_integration(&vb_state, project_repository)?;
let base = target_to_base_branch(project_repository, &target)?;
Ok(base)
@ -362,197 +363,193 @@ pub fn update_base_branch(
let vb_state = VirtualBranchesHandle::new(&project_repository.project().gb_dir());
// try to update every branch
let updated_vbranches =
super::get_status_by_branch(project_repository, Some(&new_target_commit.id()))?
.0
.into_iter()
.map(|(branch, _)| branch)
.map(
|mut branch: branch::Branch| -> Result<Option<branch::Branch>> {
let branch_tree = repo.find_tree(branch.tree)?;
let updated_vbranches = super::get_status_by_branch(project_repository, None)?
.0
.into_iter()
.map(|(branch, _)| branch)
.map(
|mut branch: branch::Branch| -> Result<Option<branch::Branch>> {
let branch_tree = repo.find_tree(branch.tree)?;
let branch_head_commit = repo.find_commit(branch.head).context(format!(
"failed to find commit {} for branch {}",
branch.head, branch.id
))?;
let branch_head_tree = branch_head_commit.tree().context(format!(
"failed to find tree for commit {} for branch {}",
branch.head, branch.id
))?;
let branch_head_commit = repo.find_commit(branch.head).context(format!(
"failed to find commit {} for branch {}",
branch.head, branch.id
))?;
let branch_head_tree = branch_head_commit.tree().context(format!(
"failed to find tree for commit {} for branch {}",
branch.head, branch.id
))?;
let result_integrated_detected =
|mut branch: branch::Branch| -> Result<Option<branch::Branch>> {
// branch head tree is the same as the new target tree.
// meaning we can safely use the new target commit as the branch head.
let result_integrated_detected =
|mut branch: branch::Branch| -> Result<Option<branch::Branch>> {
// branch head tree is the same as the new target tree.
// meaning we can safely use the new target commit as the branch head.
branch.head = new_target_commit.id();
// it also means that the branch is fully integrated into the target.
// disconnect it from the upstream
branch.upstream = None;
branch.upstream_head = None;
let non_commited_files = diff::trees(
&project_repository.git_repository,
&branch_head_tree,
&branch_tree,
)?;
if non_commited_files.is_empty() {
// if there are no committed files, then the branch is fully merged
// and we can delete it.
vb_state.remove_branch(branch.id)?;
project_repository.delete_branch_reference(&branch)?;
Ok(None)
} else {
vb_state.set_branch(branch.clone())?;
Ok(Some(branch))
}
};
if branch_head_tree.id() == new_target_tree.id() {
return result_integrated_detected(branch);
}
// try to merge branch head with new target
let mut branch_tree_merge_index = repo
.merge_trees(&old_target_tree, &branch_tree, &new_target_tree)
.context(format!("failed to merge trees for branch {}", branch.id))?;
if branch_tree_merge_index.has_conflicts() {
// branch tree conflicts with new target, unapply branch for now. we'll handle it later, when user applies it back.
branch.applied = false;
vb_state.set_branch(branch.clone())?;
return Ok(Some(branch));
}
let branch_merge_index_tree_oid =
branch_tree_merge_index.write_tree_to(repo)?;
if branch_merge_index_tree_oid == new_target_tree.id() {
return result_integrated_detected(branch);
}
if branch.head == target.sha {
// there are no commits on the branch, so we can just update the head to the new target and calculate the new tree
branch.head = new_target_commit.id();
branch.tree = branch_merge_index_tree_oid;
vb_state.set_branch(branch.clone())?;
return Ok(Some(branch));
}
let mut branch_head_merge_index = repo
.merge_trees(&old_target_tree, &branch_head_tree, &new_target_tree)
.context(format!(
"failed to merge head tree for branch {}",
branch.id
))?;
// it also means that the branch is fully integrated into the target.
// disconnect it from the upstream
branch.upstream = None;
branch.upstream_head = None;
if branch_head_merge_index.has_conflicts() {
// branch commits conflict with new target, make sure the branch is
// unapplied. conflicts will be dealt with when applying it back.
branch.applied = false;
vb_state.set_branch(branch.clone())?;
return Ok(Some(branch));
}
// branch commits do not conflict with new target, so lets merge them
let branch_head_merge_tree_oid = branch_head_merge_index
.write_tree_to(repo)
.context(format!(
"failed to write head merge index for {}",
branch.id
))?;
let ok_with_force_push = project_repository.project().ok_with_force_push;
let result_merge =
|mut branch: branch::Branch| -> Result<Option<branch::Branch>> {
// branch was pushed to upstream, and user doesn't like force pushing.
// create a merge commit to avoid the need of force pushing then.
let branch_head_merge_tree = repo
.find_tree(branch_head_merge_tree_oid)
.context("failed to find tree")?;
let new_target_head = project_repository
.commit(
user,
format!(
"Merged {}/{} into {}",
target.branch.remote(),
target.branch.branch(),
branch.name
)
.as_str(),
&branch_head_merge_tree,
&[&branch_head_commit, &new_target_commit],
signing_key,
)
.context("failed to commit merge")?;
branch.head = new_target_head;
branch.tree = branch_merge_index_tree_oid;
let non_commited_files = diff::trees(
&project_repository.git_repository,
&branch_head_tree,
&branch_tree,
)?;
if non_commited_files.is_empty() {
// if there are no committed files, then the branch is fully merged
// and we can delete it.
vb_state.remove_branch(branch.id)?;
project_repository.delete_branch_reference(&branch)?;
Ok(None)
} else {
vb_state.set_branch(branch.clone())?;
Ok(Some(branch))
};
}
};
if branch.upstream.is_some() && !ok_with_force_push {
return result_merge(branch);
}
if branch_head_tree.id() == new_target_tree.id() {
return result_integrated_detected(branch);
}
// branch was not pushed to upstream yet. attempt a rebase,
let (_, committer) = project_repository.git_signatures(user)?;
let mut rebase_options = git2::RebaseOptions::new();
rebase_options.quiet(true);
rebase_options.inmemory(true);
let mut rebase = repo
.rebase(
Some(branch.head),
Some(new_target_commit.id()),
None,
Some(&mut rebase_options),
// try to merge branch head with new target
let mut branch_tree_merge_index = repo
.merge_trees(&old_target_tree, &branch_tree, &new_target_tree)
.context(format!("failed to merge trees for branch {}", branch.id))?;
if branch_tree_merge_index.has_conflicts() {
// branch tree conflicts with new target, unapply branch for now. we'll handle it later, when user applies it back.
branch.applied = false;
vb_state.set_branch(branch.clone())?;
return Ok(Some(branch));
}
let branch_merge_index_tree_oid = branch_tree_merge_index.write_tree_to(repo)?;
if branch_merge_index_tree_oid == new_target_tree.id() {
return result_integrated_detected(branch);
}
if branch.head == target.sha {
// there are no commits on the branch, so we can just update the head to the new target and calculate the new tree
branch.head = new_target_commit.id();
branch.tree = branch_merge_index_tree_oid;
vb_state.set_branch(branch.clone())?;
return Ok(Some(branch));
}
let mut branch_head_merge_index = repo
.merge_trees(&old_target_tree, &branch_head_tree, &new_target_tree)
.context(format!(
"failed to merge head tree for branch {}",
branch.id
))?;
if branch_head_merge_index.has_conflicts() {
// branch commits conflict with new target, make sure the branch is
// unapplied. conflicts will be dealt with when applying it back.
branch.applied = false;
vb_state.set_branch(branch.clone())?;
return Ok(Some(branch));
}
// branch commits do not conflict with new target, so lets merge them
let branch_head_merge_tree_oid = branch_head_merge_index
.write_tree_to(repo)
.context(format!(
"failed to write head merge index for {}",
branch.id
))?;
let ok_with_force_push = project_repository.project().ok_with_force_push;
let result_merge = |mut branch: branch::Branch| -> Result<Option<branch::Branch>> {
// branch was pushed to upstream, and user doesn't like force pushing.
// create a merge commit to avoid the need of force pushing then.
let branch_head_merge_tree = repo
.find_tree(branch_head_merge_tree_oid)
.context("failed to find tree")?;
let new_target_head = project_repository
.commit(
user,
format!(
"Merged {}/{} into {}",
target.branch.remote(),
target.branch.branch(),
branch.name
)
.as_str(),
&branch_head_merge_tree,
&[&branch_head_commit, &new_target_commit],
signing_key,
)
.context("failed to rebase")?;
.context("failed to commit merge")?;
let mut rebase_success = true;
// check to see if these commits have already been pushed
let mut last_rebase_head = branch.head;
while rebase.next().is_some() {
let index = rebase
.inmemory_index()
.context("failed to get inmemory index")?;
if index.has_conflicts() {
rebase_success = false;
break;
}
branch.head = new_target_head;
branch.tree = branch_merge_index_tree_oid;
vb_state.set_branch(branch.clone())?;
Ok(Some(branch))
};
if let Ok(commit_id) = rebase.commit(None, &committer.clone().into(), None)
{
last_rebase_head = commit_id.into();
} else {
rebase_success = false;
break;
}
if branch.upstream.is_some() && !ok_with_force_push {
return result_merge(branch);
}
// branch was not pushed to upstream yet. attempt a rebase,
let (_, committer) = project_repository.git_signatures(user)?;
let mut rebase_options = git2::RebaseOptions::new();
rebase_options.quiet(true);
rebase_options.inmemory(true);
let mut rebase = repo
.rebase(
Some(branch.head),
Some(new_target_commit.id()),
None,
Some(&mut rebase_options),
)
.context("failed to rebase")?;
let mut rebase_success = true;
// check to see if these commits have already been pushed
let mut last_rebase_head = branch.head;
while rebase.next().is_some() {
let index = rebase
.inmemory_index()
.context("failed to get inmemory index")?;
if index.has_conflicts() {
rebase_success = false;
break;
}
if rebase_success {
// rebase worked out, rewrite the branch head
rebase.finish(None).context("failed to finish rebase")?;
branch.head = last_rebase_head;
branch.tree = branch_merge_index_tree_oid;
vb_state.set_branch(branch.clone())?;
return Ok(Some(branch));
if let Ok(commit_id) = rebase.commit(None, &committer.clone().into(), None) {
last_rebase_head = commit_id.into();
} else {
rebase_success = false;
break;
}
}
// rebase failed, do a merge commit
rebase.abort().context("failed to abort rebase")?;
if rebase_success {
// rebase worked out, rewrite the branch head
rebase.finish(None).context("failed to finish rebase")?;
branch.head = last_rebase_head;
branch.tree = branch_merge_index_tree_oid;
vb_state.set_branch(branch.clone())?;
return Ok(Some(branch));
}
result_merge(branch)
},
)
.collect::<Result<Vec<_>>>()?
.into_iter()
.flatten()
.collect::<Vec<_>>();
// rebase failed, do a merge commit
rebase.abort().context("failed to abort rebase")?;
result_merge(branch)
},
)
.collect::<Result<Vec<_>>>()?
.into_iter()
.flatten()
.collect::<Vec<_>>();
// ok, now all the problematic branches have been unapplied
// now we calculate and checkout new tree for the working directory
@ -569,9 +566,10 @@ pub fn update_base_branch(
})
.context("failed to calculate final tree")?;
repo.checkout_tree(&final_tree).force().checkout().context(
"failed to checkout index, this should not have happened, we should have already detected this",
)?;
repo.checkout_tree(&final_tree)
.force()
.checkout()
.context("failed to checkout index, this should not have happened, we should have already detected this")?;
// write new target oid
vb_state.set_default_target(target::Target {

View File

@ -1,3 +1,4 @@
use std::path::Path;
use std::{fmt, path, str::FromStr, vec};
use anyhow::{Context, Result};
@ -43,6 +44,12 @@ impl FromStr for OwnershipClaim {
}
}
impl<'a> From<&'a OwnershipClaim> for (&'a Path, &'a [Hunk]) {
fn from(value: &'a OwnershipClaim) -> Self {
(&value.file_path, &value.hunks)
}
}
impl OwnershipClaim {
pub fn is_full(&self) -> bool {
self.hunks.is_empty()
@ -67,8 +74,8 @@ impl OwnershipClaim {
}
// return a copy of self, with another ranges added
pub fn plus(&self, another: &OwnershipClaim) -> OwnershipClaim {
if !self.file_path.eq(&another.file_path) {
pub fn plus(&self, another: OwnershipClaim) -> OwnershipClaim {
if self.file_path != another.file_path {
return self.clone();
}
@ -89,23 +96,22 @@ impl OwnershipClaim {
.cloned()
.collect::<Vec<Hunk>>();
another.hunks.iter().for_each(|hunk| {
hunks.insert(0, hunk.clone());
});
for hunk in another.hunks {
hunks.insert(0, hunk);
}
OwnershipClaim {
file_path: self.file_path.clone(),
file_path: another.file_path,
hunks,
}
}
// returns (taken, remaining)
// if all of the ranges are removed, return None
/// returns `(taken, remaining)` if all the ranges are removed, return `None`
pub fn minus(
&self,
another: &OwnershipClaim,
) -> (Option<OwnershipClaim>, Option<OwnershipClaim>) {
if !self.file_path.eq(&another.file_path) {
if self.file_path != another.file_path {
// no changes
return (None, Some(self.clone()));
}

View File

@ -13,6 +13,7 @@ pub struct Hunk {
pub timestamp_ms: Option<u128>,
pub start: u32,
pub end: u32,
pub locked_to: Vec<diff::HunkLock>,
}
impl From<&diff::GitHunk> for Hunk {
@ -22,6 +23,7 @@ impl From<&diff::GitHunk> for Hunk {
end: hunk.new_start + hunk.new_lines,
hash: Some(Hunk::hash_diff(hunk.diff_lines.as_ref())),
timestamp_ms: None,
locked_to: hunk.locked_to.to_vec(),
}
}
}
@ -43,6 +45,7 @@ impl From<RangeInclusive<u32>> for Hunk {
end: *range.end(),
hash: None,
timestamp_ms: None,
locked_to: vec![],
}
}
}
@ -121,6 +124,7 @@ impl Hunk {
timestamp_ms,
start,
end,
locked_to: vec![],
})
}
}

View File

@ -80,7 +80,7 @@ impl BranchOwnershipClaims {
true
}
pub fn put(&mut self, ownership: &OwnershipClaim) {
pub fn put(&mut self, ownership: OwnershipClaim) {
let target = self
.claims
.iter()
@ -94,7 +94,7 @@ impl BranchOwnershipClaims {
if let Some(target) = target {
self.claims.insert(0, target.plus(ownership));
} else {
self.claims.insert(0, ownership.clone());
self.claims.insert(0, ownership);
}
}

View File

@ -1,4 +1,10 @@
use crate::error::Error;
use crate::{
error::Error,
snapshots::{
entry::{OperationType, SnapshotDetails},
snapshot,
},
};
use std::{collections::HashMap, path::Path, sync::Arc};
use anyhow::Context;
@ -225,11 +231,70 @@ impl Controller {
&self,
project_id: &ProjectId,
branch_id: &BranchId,
commit_oid: git::Oid,
ownership: &BranchOwnershipClaims,
) -> Result<git::Oid, Error> {
self.inner(project_id)
.await
.amend(project_id, branch_id, ownership)
.amend(project_id, branch_id, commit_oid, ownership)
.await
}
pub async fn move_commit_file(
&self,
project_id: &ProjectId,
branch_id: &BranchId,
from_commit_oid: git::Oid,
to_commit_oid: git::Oid,
ownership: &BranchOwnershipClaims,
) -> Result<git::Oid, Error> {
self.inner(project_id)
.await
.move_commit_file(
project_id,
branch_id,
from_commit_oid,
to_commit_oid,
ownership,
)
.await
}
pub async fn undo_commit(
&self,
project_id: &ProjectId,
branch_id: &BranchId,
commit_oid: git::Oid,
) -> Result<(), Error> {
self.inner(project_id)
.await
.undo_commit(project_id, branch_id, commit_oid)
.await
}
pub async fn insert_blank_commit(
&self,
project_id: &ProjectId,
branch_id: &BranchId,
commit_oid: git::Oid,
offset: i32,
) -> Result<(), Error> {
self.inner(project_id)
.await
.insert_blank_commit(project_id, branch_id, commit_oid, offset)
.await
}
pub async fn reorder_commit(
&self,
project_id: &ProjectId,
branch_id: &BranchId,
commit_oid: git::Oid,
offset: i32,
) -> Result<(), Error> {
self.inner(project_id)
.await
.reorder_commit(project_id, branch_id, commit_oid, offset)
.await
}
@ -397,7 +462,7 @@ impl ControllerInner {
})
.transpose()?;
super::commit(
let result = super::commit(
project_repository,
branch_id,
message,
@ -406,7 +471,13 @@ impl ControllerInner {
user,
run_hooks,
)
.map_err(Into::into)
.map_err(Into::into);
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::CreateCommit),
)?;
result
})
}
@ -453,6 +524,10 @@ impl ControllerInner {
self.with_verify_branch(project_id, |project_repository, _| {
let branch_id = super::create_virtual_branch(project_repository, create)?.id;
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::CreateBranch),
)?;
Ok(branch_id)
})
}
@ -475,13 +550,17 @@ impl ControllerInner {
.context("failed to get private key")
})
.transpose()?;
Ok(super::create_virtual_branch_from_branch(
let result = super::create_virtual_branch_from_branch(
project_repository,
branch,
signing_key.as_ref(),
user,
)?)
)?;
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::CreateBranch),
)?;
Ok(result)
})
}
@ -512,8 +591,12 @@ impl ControllerInner {
) -> Result<super::BaseBranch, Error> {
let project = self.projects.get(project_id)?;
let project_repository = project_repository::Repository::open(&project)?;
Ok(super::set_base_branch(&project_repository, target_branch)?)
let result = super::set_base_branch(&project_repository, target_branch)?;
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::SetBaseBranch),
)?;
Ok(result)
}
pub async fn merge_virtual_branch_upstream(
@ -535,13 +618,18 @@ impl ControllerInner {
})
.transpose()?;
super::merge_virtual_branch_upstream(
let result = super::merge_virtual_branch_upstream(
project_repository,
branch_id,
signing_key.as_ref(),
user,
)
.map_err(Into::into)
.map_err(Into::into);
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::MergeUpstream),
)?;
result
})
}
@ -560,8 +648,13 @@ impl ControllerInner {
})
.transpose()?;
super::update_base_branch(project_repository, user, signing_key.as_ref())
.map_err(Into::into)
let result = super::update_base_branch(project_repository, user, signing_key.as_ref())
.map_err(Into::into);
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::UpdateWorkspaceBase),
)?;
result
})
}
@ -573,7 +666,23 @@ impl ControllerInner {
let _permit = self.semaphore.acquire().await;
self.with_verify_branch(project_id, |project_repository, _| {
let details = if branch_update.ownership.is_some() {
SnapshotDetails::new(OperationType::MoveHunk)
} else if branch_update.name.is_some() {
SnapshotDetails::new(OperationType::UpdateBranchName)
} else if branch_update.notes.is_some() {
SnapshotDetails::new(OperationType::UpdateBranchNotes)
} else if branch_update.order.is_some() {
SnapshotDetails::new(OperationType::ReorderBranches)
} else if branch_update.selected_for_changes.is_some() {
SnapshotDetails::new(OperationType::SelectDefaultVirtualBranch)
} else if branch_update.upstream.is_some() {
SnapshotDetails::new(OperationType::UpdateBranchRemoteName)
} else {
SnapshotDetails::new(OperationType::GenericBranchUpdate)
};
super::update_branch(project_repository, branch_update)?;
snapshot::create(project_repository.project(), details)?;
Ok(())
})
}
@ -587,6 +696,10 @@ impl ControllerInner {
self.with_verify_branch(project_id, |project_repository, _| {
super::delete_branch(project_repository, branch_id)?;
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::DeleteBranch),
)?;
Ok(())
})
}
@ -610,8 +723,14 @@ impl ControllerInner {
})
.transpose()?;
super::apply_branch(project_repository, branch_id, signing_key.as_ref(), user)
.map_err(Into::into)
let result =
super::apply_branch(project_repository, branch_id, signing_key.as_ref(), user)
.map_err(Into::into);
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::ApplyBranch),
)?;
result
})
}
@ -623,7 +742,13 @@ impl ControllerInner {
let _permit = self.semaphore.acquire().await;
self.with_verify_branch(project_id, |project_repository, _| {
super::unapply_ownership(project_repository, ownership).map_err(Into::into)
let result =
super::unapply_ownership(project_repository, ownership).map_err(Into::into);
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::DiscardHunk),
)?;
result
})
}
@ -635,7 +760,12 @@ impl ControllerInner {
let _permit = self.semaphore.acquire().await;
self.with_verify_branch(project_id, |project_repository, _| {
super::reset_files(project_repository, ownership).map_err(Into::into)
let result = super::reset_files(project_repository, ownership).map_err(Into::into);
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::DiscardFile),
)?;
result
})
}
@ -643,12 +773,106 @@ impl ControllerInner {
&self,
project_id: &ProjectId,
branch_id: &BranchId,
commit_oid: git::Oid,
ownership: &BranchOwnershipClaims,
) -> Result<git::Oid, Error> {
let _permit = self.semaphore.acquire().await;
self.with_verify_branch(project_id, |project_repository, _| {
super::amend(project_repository, branch_id, ownership).map_err(Into::into)
let result = super::amend(project_repository, branch_id, commit_oid, ownership)
.map_err(Into::into);
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::AmendCommit),
)?;
result
})
}
pub async fn move_commit_file(
&self,
project_id: &ProjectId,
branch_id: &BranchId,
from_commit_oid: git::Oid,
to_commit_oid: git::Oid,
ownership: &BranchOwnershipClaims,
) -> Result<git::Oid, Error> {
let _permit = self.semaphore.acquire().await;
self.with_verify_branch(project_id, |project_repository, _| {
let result = super::move_commit_file(
project_repository,
branch_id,
from_commit_oid,
to_commit_oid,
ownership,
)
.map_err(Into::into);
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::MoveCommitFile),
)?;
result
})
}
pub async fn undo_commit(
&self,
project_id: &ProjectId,
branch_id: &BranchId,
commit_oid: git::Oid,
) -> Result<(), Error> {
let _permit = self.semaphore.acquire().await;
self.with_verify_branch(project_id, |project_repository, _| {
let result =
super::undo_commit(project_repository, branch_id, commit_oid).map_err(Into::into);
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::UndoCommit),
)?;
result
})
}
pub async fn insert_blank_commit(
&self,
project_id: &ProjectId,
branch_id: &BranchId,
commit_oid: git::Oid,
offset: i32,
) -> Result<(), Error> {
let _permit = self.semaphore.acquire().await;
self.with_verify_branch(project_id, |project_repository, user| {
let result =
super::insert_blank_commit(project_repository, branch_id, commit_oid, user, offset)
.map_err(Into::into);
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::InsertBlankCommit),
)?;
result
})
}
/// Moves commit `commit_oid` by `offset` positions within the branch history.
///
/// Serialized behind the controller semaphore. A snapshot with
/// `OperationType::ReorderCommit` is recorded whether or not the underlying
/// operation succeeded (matching the sibling methods in this impl).
pub async fn reorder_commit(
    &self,
    project_id: &ProjectId,
    branch_id: &BranchId,
    commit_oid: git::Oid,
    offset: i32,
) -> Result<(), Error> {
    let _permit = self.semaphore.acquire().await;
    self.with_verify_branch(project_id, |project_repository, _| {
        // Snapshot after the operation; convert the error type on return.
        let reordered = super::reorder_commit(project_repository, branch_id, commit_oid, offset);
        snapshot::create(
            project_repository.project(),
            SnapshotDetails::new(OperationType::ReorderCommit),
        )?;
        reordered.map_err(Into::into)
    })
}
@ -661,8 +885,13 @@ impl ControllerInner {
let _permit = self.semaphore.acquire().await;
self.with_verify_branch(project_id, |project_repository, _| {
super::reset_branch(project_repository, branch_id, target_commit_oid)
.map_err(Into::into)
let result = super::reset_branch(project_repository, branch_id, target_commit_oid)
.map_err(Into::into);
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::UndoCommit),
)?;
result
})
}
@ -674,9 +903,14 @@ impl ControllerInner {
let _permit = self.semaphore.acquire().await;
self.with_verify_branch(project_id, |project_repository, _| {
super::unapply_branch(project_repository, branch_id)
let result = super::unapply_branch(project_repository, branch_id)
.map(|_| ())
.map_err(Into::into)
.map_err(Into::into);
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::UnapplyBranch),
)?;
result
})
}
@ -713,7 +947,13 @@ impl ControllerInner {
let _permit = self.semaphore.acquire().await;
self.with_verify_branch(project_id, |project_repository, _| {
super::cherry_pick(project_repository, branch_id, commit_oid).map_err(Into::into)
let result =
super::cherry_pick(project_repository, branch_id, commit_oid).map_err(Into::into);
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::CherryPick),
)?;
result
})
}
@ -745,7 +985,13 @@ impl ControllerInner {
let _permit = self.semaphore.acquire().await;
self.with_verify_branch(project_id, |project_repository, _| {
super::squash(project_repository, branch_id, commit_oid).map_err(Into::into)
let result =
super::squash(project_repository, branch_id, commit_oid).map_err(Into::into);
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::SquashCommit),
)?;
result
})
}
@ -758,8 +1004,14 @@ impl ControllerInner {
) -> Result<(), Error> {
let _permit = self.semaphore.acquire().await;
self.with_verify_branch(project_id, |project_repository, _| {
super::update_commit_message(project_repository, branch_id, commit_oid, message)
.map_err(Into::into)
let result =
super::update_commit_message(project_repository, branch_id, commit_oid, message)
.map_err(Into::into);
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::UpdateCommitMessage),
)?;
result
})
}
@ -829,14 +1081,19 @@ impl ControllerInner {
.context("failed to get private key")
})
.transpose()?;
super::move_commit(
let result = super::move_commit(
project_repository,
target_branch_id,
commit_oid,
user,
signing_key.as_ref(),
)
.map_err(Into::into)
.map_err(Into::into);
snapshot::create(
project_repository.project(),
SnapshotDetails::new(OperationType::MoveCommit),
)?;
result
})
}
}

View File

@ -6,6 +6,59 @@ use crate::{
projects::ProjectId,
};
// Generic error enum for use in the virtual branches module.
#[derive(Debug, thiserror::Error)]
pub enum VirtualBranchError {
#[error("project")]
Conflict(ProjectConflict),
#[error("branch not found")]
BranchNotFound(BranchNotFound),
#[error("default target not set")]
DefaultTargetNotSet(DefaultTargetNotSet),
#[error("target ownership not found")]
TargetOwnerhshipNotFound(BranchOwnershipClaims),
#[error("git object {0} not found")]
GitObjectNotFound(git::Oid),
#[error("commit failed")]
CommitFailed,
#[error("rebase failed")]
RebaseFailed,
#[error("force push not allowed")]
ForcePushNotAllowed(ForcePushNotAllowed),
#[error("branch has no commits")]
BranchHasNoCommits,
#[error(transparent)]
Other(#[from] anyhow::Error),
}
impl ErrorWithContext for VirtualBranchError {
fn context(&self) -> Option<Context> {
Some(match self {
VirtualBranchError::Conflict(ctx) => ctx.to_context(),
VirtualBranchError::BranchNotFound(ctx) => ctx.to_context(),
VirtualBranchError::DefaultTargetNotSet(ctx) => ctx.to_context(),
VirtualBranchError::TargetOwnerhshipNotFound(_) => {
error::Context::new_static(Code::Branches, "target ownership not found")
}
VirtualBranchError::GitObjectNotFound(oid) => {
error::Context::new(Code::Branches, format!("git object {oid} not found"))
}
VirtualBranchError::CommitFailed => {
error::Context::new_static(Code::Branches, "commit failed")
}
VirtualBranchError::RebaseFailed => {
error::Context::new_static(Code::Branches, "rebase failed")
}
VirtualBranchError::BranchHasNoCommits => error::Context::new_static(
Code::Branches,
"Branch has no commits - there is nothing to amend to",
),
VirtualBranchError::ForcePushNotAllowed(ctx) => ctx.to_context(),
VirtualBranchError::Other(error) => return error.custom_context_or_root_cause().into(),
})
}
}
#[derive(Debug, thiserror::Error)]
pub enum VerifyError {
#[error("head is detached")]
@ -55,6 +108,8 @@ pub enum ResetBranchError {
DefaultTargetNotSet(DefaultTargetNotSet),
#[error(transparent)]
Other(#[from] anyhow::Error),
#[error(transparent)]
Git(#[from] git::Error),
}
impl ErrorWithContext for ResetBranchError {
@ -66,6 +121,7 @@ impl ErrorWithContext for ResetBranchError {
error::Context::new(Code::Branches, format!("commit {} not found", oid))
}
ResetBranchError::Other(error) => return error.custom_context_or_root_cause().into(),
ResetBranchError::Git(_err) => return None,
})
}
}
@ -313,43 +369,6 @@ impl ForcePushNotAllowed {
}
}
// Errors that can occur while amending a commit on a virtual branch.
// NOTE(review): the variants here overlap with `VirtualBranchError`, which
// presumably supersedes this enum — confirm whether callers still use it.
#[derive(Debug, thiserror::Error)]
pub enum AmendError {
    #[error("force push not allowed")]
    ForcePushNotAllowed(ForcePushNotAllowed),
    // Variant name typo ("Ownerhship") predates this review; kept as-is.
    #[error("target ownership not found")]
    TargetOwnerhshipNotFound(BranchOwnershipClaims),
    #[error("branch has no commits")]
    BranchHasNoCommits,
    #[error("default target not set")]
    DefaultTargetNotSet(DefaultTargetNotSet),
    #[error("branch not found")]
    BranchNotFound(BranchNotFound),
    #[error("project is in conflict state")]
    Conflict(ProjectConflict),
    // Catch-all for ad-hoc errors bubbled up via `anyhow`.
    #[error(transparent)]
    Other(#[from] anyhow::Error),
}
impl ErrorWithContext for AmendError {
fn context(&self) -> Option<Context> {
Some(match self {
AmendError::ForcePushNotAllowed(ctx) => ctx.to_context(),
AmendError::Conflict(ctx) => ctx.to_context(),
AmendError::BranchNotFound(ctx) => ctx.to_context(),
AmendError::BranchHasNoCommits => error::Context::new_static(
Code::Branches,
"Branch has no commits - there is nothing to amend to",
),
AmendError::DefaultTargetNotSet(ctx) => ctx.to_context(),
AmendError::TargetOwnerhshipNotFound(_) => {
error::Context::new_static(Code::Branches, "target ownership not found")
}
AmendError::Other(error) => return error.custom_context_or_root_cause().into(),
})
}
}
#[derive(Debug, thiserror::Error)]
pub enum CherryPickError {
#[error("target commit {0} not found ")]

View File

@ -1,6 +1,6 @@
use std::io::{Read, Write};
use std::path::PathBuf;
use anyhow::{Context, Result};
use anyhow::{anyhow, Context, Result};
use bstr::ByteSlice;
use lazy_static::lazy_static;
@ -16,12 +16,122 @@ lazy_static! {
git::LocalRefname::new("gitbutler/integration", None);
}
// Commit message used for the synthetic workspace-head merge commit.
const WORKSPACE_HEAD: &str = "Workspace Head";
// Identity stamped on all GitButler-authored integration commits.
const GITBUTLER_INTEGRATION_COMMIT_AUTHOR_NAME: &str = "GitButler";
const GITBUTLER_INTEGRATION_COMMIT_AUTHOR_EMAIL: &str = "gitbutler@gitbutler.com";

/// Builds the GitButler signature, timestamped "now", used as both author and
/// committer for integration commits.
fn get_committer<'a>() -> Result<git::Signature<'a>> {
    let committer = git::Signature::now(
        GITBUTLER_INTEGRATION_COMMIT_AUTHOR_NAME,
        GITBUTLER_INTEGRATION_COMMIT_AUTHOR_EMAIL,
    )?;
    Ok(committer)
}
// Creates and returns a merge commit of all active branch heads.
//
// This is the base against which we diff the working directory to understand
// what files have been modified.
pub fn get_workspace_head(
    vb_state: &VirtualBranchesHandle,
    project_repository: &project_repository::Repository,
) -> Result<git::Oid> {
    let target = vb_state
        .get_default_target()
        .context("failed to get target")?;
    let repo = &project_repository.git_repository;

    // Fix: previously a second `VirtualBranchesHandle` was constructed here,
    // shadowing (and ignoring) the caller-supplied `vb_state` parameter.
    let all_virtual_branches = vb_state.list_branches()?;
    let applied_virtual_branches = all_virtual_branches
        .iter()
        .filter(|branch| branch.applied)
        .collect::<Vec<_>>();

    let target_commit = repo.find_commit(target.sha)?;

    // If no branches are applied then the workspace head is the target.
    // (Checked up front so we skip the tree lookups and merge loop entirely.)
    if applied_virtual_branches.is_empty() {
        return Ok(target_commit.id());
    }

    let target_tree = target_commit.tree()?;
    let mut workspace_tree = target_commit.tree()?;

    // Merge applied branches into one `workspace_tree`, collecting each
    // branch head as we go (previously the heads were looked up a second
    // time in a separate pass).
    let mut branch_heads = Vec::with_capacity(applied_virtual_branches.len());
    for branch in &applied_virtual_branches {
        let branch_head = repo.find_commit(branch.head)?;
        let branch_tree = branch_head.tree()?;
        // NOTE(review): a failing `merge_trees` call is silently skipped here,
        // as in the original — only an in-tree conflict is treated as fatal.
        if let Ok(mut result) = repo.merge_trees(&target_tree, &workspace_tree, &branch_tree) {
            if !result.has_conflicts() {
                let final_tree_oid = result.write_tree_to(repo)?;
                workspace_tree = repo.find_tree(final_tree_oid)?;
            } else {
                // TODO: Create error type and provide context.
                return Err(anyhow!("Unexpected merge conflict"));
            }
        }
        branch_heads.push(branch_head);
    }
    let branch_head_refs = branch_heads.iter().collect::<Vec<_>>();

    // TODO(mg): Can we make this a constant?
    let committer = get_committer()?;

    // Create merge commit of branch heads.
    let workspace_head_id = repo.commit(
        None,
        &committer,
        &committer,
        WORKSPACE_HEAD,
        &workspace_tree,
        branch_head_refs.as_slice(),
    )?;
    Ok(workspace_head_id)
}
// Before switching the user to our gitbutler integration branch we save
// the current branch into a text file. It is used in generating the commit
// message for integration branch, as a helpful hint about how to get back
// to where you were.
struct PreviousHead {
    // First segment of the `.git/integration` file (before the ':');
    // presumably the previous branch's reference name — TODO confirm,
    // since the write path currently stores an empty head segment.
    head: String,
    // Second segment (after the ':'): the commit sha as a hex string.
    sha: String,
}
/// Reads the previously-recorded head from the `.git/integration` file.
///
/// Returns `Ok(None)` when the file is missing/unreadable or when its
/// contents are not in the expected `<head>:<sha>` form. Previously,
/// contents without a `:` separator caused an out-of-bounds panic on
/// `parts[1]`.
fn read_integration_file(path: &PathBuf) -> Result<Option<PreviousHead>> {
    let prev_data = match std::fs::read_to_string(path) {
        Ok(data) => data,
        // Missing file simply means no previous head was recorded.
        Err(_) => return Ok(None),
    };
    // Split on the first ':' only; git reference names and shas cannot
    // themselves contain ':', so this matches the old `split` behaviour.
    match prev_data.split_once(':') {
        Some((head, sha)) => Ok(Some(PreviousHead {
            head: head.to_string(),
            sha: sha.to_string(),
        })),
        // Malformed contents: treat as if nothing was recorded.
        None => Ok(None),
    }
}
/// Records `head` into the given file as `<head>:<sha>`, the format that
/// `read_integration_file` parses.
///
/// Fix: previously only `":{sha}"` was written, leaving the head segment
/// empty — the pre-refactor inline code wrote `"{head_name}:{sha}"`, so the
/// reference name is restored here.
fn write_integration_file(head: &git::Reference, path: PathBuf) -> Result<()> {
    let name = head.name().unwrap().to_string();
    let sha = head.target().unwrap().to_string();
    std::fs::write(path, format!("{}:{}", name, sha))?;
    Ok(())
}
/// Updates the gitbutler integration branch, letting the callee compute the
/// workspace head itself (convenience wrapper around
/// `update_gitbutler_integration_with_commit` with `None`).
pub fn update_gitbutler_integration(
    vb_state: &VirtualBranchesHandle,
    project_repository: &project_repository::Repository,
) -> Result<git::Oid> {
    update_gitbutler_integration_with_commit(vb_state, project_repository, None)
}
pub fn update_gitbutler_integration_with_commit(
vb_state: &VirtualBranchesHandle,
project_repository: &project_repository::Repository,
integration_commit_id: Option<git::Oid>,
) -> Result<git::Oid> {
let target = vb_state
.get_default_target()
@ -41,27 +151,19 @@ pub fn update_gitbutler_integration(
let target_commit = repo.find_commit(target.sha)?;
// get current repo head for reference
let head = repo.head()?;
let mut prev_head = head.name().unwrap().to_string();
let mut prev_sha = head.target().unwrap().to_string();
let integration_file = repo.path().join("integration");
if prev_head == GITBUTLER_INTEGRATION_REFERENCE.to_string() {
// read the .git/integration file
if let Ok(mut integration_file) = std::fs::File::open(integration_file) {
let mut prev_data = String::new();
integration_file.read_to_string(&mut prev_data)?;
let parts: Vec<&str> = prev_data.split(':').collect();
prev_head = parts[0].to_string();
prev_sha = parts[1].to_string();
let head_ref = repo.head()?;
let integration_filepath = repo.path().join("integration");
let mut prev_branch = read_integration_file(&integration_filepath)?;
if let Some(branch) = &prev_branch {
if branch.head != GITBUTLER_INTEGRATION_REFERENCE.to_string() {
// we are moving from a regular branch to our gitbutler integration branch, write a file to
// .git/integration with the previous head and name
write_integration_file(&head_ref, integration_filepath)?;
prev_branch = Some(PreviousHead {
head: head_ref.target().unwrap().to_string(),
sha: head_ref.target().unwrap().to_string(),
});
}
} else {
// we are moving from a regular branch to our gitbutler integration branch, save the original
// write a file to .git/integration with the previous head and name
let mut file = std::fs::File::create(integration_file)?;
prev_head.push(':');
prev_head.push_str(&prev_sha);
file.write_all(prev_head.as_bytes())?;
}
// commit index to temp head for the merge
@ -80,19 +182,12 @@ pub fn update_gitbutler_integration(
.filter(|branch| branch.applied)
.collect::<Vec<_>>();
let base_tree = target_commit.tree()?;
let mut final_tree = target_commit.tree()?;
for branch in &applied_virtual_branches {
// merge this branches tree with our tree
let branch_head = repo.find_commit(branch.head)?;
let branch_tree = branch_head.tree()?;
if let Ok(mut result) = repo.merge_trees(&base_tree, &final_tree, &branch_tree) {
if !result.has_conflicts() {
let final_tree_oid = result.write_tree_to(repo)?;
final_tree = repo.find_tree(final_tree_oid)?;
}
}
}
let integration_commit_id = match integration_commit_id {
Some(commit_id) => commit_id,
_ => get_workspace_head(&vb_state, project_repository)?,
};
let integration_commit = repo.find_commit(integration_commit_id).unwrap();
let integration_tree = integration_commit.tree()?;
// message that says how to get back to where they were
let mut message = "GitButler Integration Commit".to_string();
@ -125,32 +220,31 @@ pub fn update_gitbutler_integration(
message.push('\n');
}
}
message.push_str("\nYour previous branch was: ");
message.push_str(&prev_head);
message.push_str("\n\n");
message.push_str("The sha for that commit was: ");
message.push_str(&prev_sha);
message.push_str("\n\n");
if let Some(prev_branch) = prev_branch {
message.push_str("\nYour previous branch was: ");
message.push_str(&prev_branch.head);
message.push_str("\n\n");
message.push_str("The sha for that commit was: ");
message.push_str(&prev_branch.sha);
message.push_str("\n\n");
}
message.push_str("For more information about what we're doing here, check out our docs:\n");
message.push_str("https://docs.gitbutler.com/features/virtual-branches/integration-branch\n");
let committer = git::Signature::now(
GITBUTLER_INTEGRATION_COMMIT_AUTHOR_NAME,
GITBUTLER_INTEGRATION_COMMIT_AUTHOR_EMAIL,
)?;
let committer = get_committer()?;
let final_commit = repo.commit(
Some(&"refs/heads/gitbutler/integration".parse().unwrap()),
&committer,
&committer,
&message,
&final_tree,
&integration_commit.tree()?,
&[&target_commit],
)?;
// write final_tree as the current index
let mut index = repo.index()?;
index.read_tree(&final_tree)?;
index.read_tree(&integration_tree)?;
index.write()?;
// finally, update the refs/gitbutler/ heads to the states of the current virtual branches

View File

@ -111,7 +111,17 @@ pub fn get_branch_data(
}
pub fn branch_to_remote_branch(branch: &git::Branch) -> Result<Option<RemoteBranch>> {
let commit = branch.peel_to_commit()?;
let commit = match branch.peel_to_commit() {
Ok(c) => c,
Err(err) => {
tracing::warn!(
?err,
"ignoring branch {:?} as peeling failed",
branch.name()
);
return Ok(None);
}
};
branch
.target()
.map(|sha| {

View File

@ -1,7 +1,7 @@
use std::{
collections::HashMap,
fs::File,
io::{Read, Write},
io::Read,
path::{Path, PathBuf},
};
@ -150,11 +150,5 @@ impl VirtualBranchesHandle {
}
fn write<P: AsRef<Path>>(file_path: P, virtual_branches: &VirtualBranches) -> anyhow::Result<()> {
let contents = toml::to_string(&virtual_branches)?;
let temp_file = tempfile::NamedTempFile::new_in(file_path.as_ref().parent().unwrap())?;
let (mut file, temp_path) = temp_file.keep()?;
file.write_all(contents.as_bytes())?;
drop(file);
std::fs::rename(temp_path, file_path.as_ref())?;
Ok(())
crate::fs::write(file_path, toml::to_string(&virtual_branches)?)
}

File diff suppressed because it is too large Load Diff

Some files were not shown because too many files have changed in this diff Show More