Merge remote-tracking branch 'origin/main' into anchor-enum

# Conflicts:
#	crates/multi_buffer/src/multi_buffer.rs
Antonio Scandurra 2024-09-23 11:44:39 -06:00
commit 5ec78700b6
215 changed files with 7058 additions and 2467 deletions

View File

@ -0,0 +1,23 @@
name: Bump collab-staging Tag
on:
schedule:
# Fire every day at 16:00 UTC (At the start of the US workday)
- cron: "0 16 * * *"
jobs:
update-collab-staging-tag:
if: github.repository_owner == 'zed-industries'
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
fetch-depth: 0
- name: Update collab-staging tag
run: |
git config user.name github-actions
git config user.email github-actions@github.com
git tag -f collab-staging
git push origin collab-staging --force

View File

@ -1,23 +0,0 @@
name: Update Nightly Tag
on:
schedule:
# Fire every day at 7:00am UTC (Roughly before EU workday and after US workday)
- cron: "0 7 * * *"
jobs:
update-nightly-tag:
if: github.repository_owner == 'zed-industries'
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Update nightly tag
run: |
git config user.name github-actions
git config user.email github-actions@github.com
git tag -f nightly
git push origin nightly --force

View File

@ -15,8 +15,7 @@ concurrency:
jobs:
bump_patch_version:
runs-on:
- self-hosted
- test
- buildjet-16vcpu-ubuntu-2204
steps:
- name: Checkout code
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4

View File

@ -115,6 +115,7 @@ jobs:
uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
cache-provider: "buildjet"
- name: Install Linux dependencies
run: ./script/linux
@ -143,6 +144,7 @@ jobs:
uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
cache-provider: "github"
- name: cargo clippy
# Windows can't run shell scripts, so we need to use `cargo xtask`.

View File

@ -0,0 +1,27 @@
name: "Close Stale Issues"
on:
schedule:
- cron: "0 1 * * *"
workflow_dispatch:
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v9
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: >
Hi there! 👋
We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. Are you able to reproduce this issue in the latest version of Zed? If so, please let us know by commenting on this issue and we will keep it open; otherwise, we'll close it in a week. Feel free to open a new issue if you're seeing this message after the issue has been closed.
Thanks for your help!
close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!"
days-before-stale: 180
days-before-close: 7
any-of-issue-labels: "defect,panic / crash"
operations-per-run: 100
ascending: true
enable-statistics: true
stale-issue-label: "stale"

View File

@ -8,7 +8,6 @@ on:
env:
DOCKER_BUILDKIT: 1
DIGITALOCEAN_ACCESS_TOKEN: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}
jobs:
style:
@ -61,11 +60,12 @@ jobs:
- style
- tests
runs-on:
- self-hosted
- deploy
- buildjet-16vcpu-ubuntu-2204
steps:
- name: Add Rust to the PATH
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Install doctl
uses: digitalocean/action-doctl@v2
with:
token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}
- name: Sign into DigitalOcean docker registry
run: doctl registry login
@ -89,10 +89,19 @@ jobs:
needs:
- publish
runs-on:
- self-hosted
- deploy
- buildjet-16vcpu-ubuntu-2204
steps:
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
clean: false
- name: Install doctl
uses: digitalocean/action-doctl@v2
with:
token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}
- name: Sign into Kubernetes
run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 ${{ secrets.CLUSTER_NAME }}

View File

@ -24,6 +24,7 @@ jobs:
uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
cache-provider: "github"
- name: Configure linux
shell: bash -euxo pipefail {0}

View File

@ -19,8 +19,7 @@ jobs:
tests:
name: Run randomized tests
runs-on:
- self-hosted
- randomized-tests
- buildjet-16vcpu-ubuntu-2204
steps:
- name: Install Node
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4

View File

@ -1,6 +1,9 @@
name: Release Nightly
on:
schedule:
# Fire every day at 7:00am UTC (Roughly before EU workday and after US workday)
- cron: "0 7 * * *"
push:
tags:
- "nightly"
@ -97,8 +100,7 @@ jobs:
name: Create a Linux *.tar.gz bundle for x86
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
- deploy
- buildjet-16vcpu-ubuntu-2204
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
@ -114,6 +116,12 @@ jobs:
- name: Add Rust to the PATH
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Install Linux dependencies
run: ./script/linux
- name: Limit target directory size
run: script/clear-target-dir-if-larger-than 100
- name: Set release channel to nightly
run: |
set -euo pipefail
@ -163,3 +171,28 @@ jobs:
- name: Upload Zed Nightly
run: script/upload-nightly linux-targz
update-nightly-tag:
name: Update nightly tag
if: github.repository_owner == 'zed-industries'
runs-on: ubuntu-latest
needs:
- bundle-mac
- bundle-linux-x86
- bundle-linux-arm
steps:
- name: Checkout repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
with:
fetch-depth: 0
- name: Update nightly tag
run: |
if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
echo "Nightly tag already points to current commit. Skipping tagging."
exit 0
fi
git config user.name github-actions
git config user.email github-actions@github.com
git tag -f nightly
git push origin nightly --force

Cargo.lock (generated, 247 changed lines)
View File

@ -21,11 +21,11 @@ dependencies = [
[[package]]
name = "addr2line"
version = "0.22.0"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678"
checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375"
dependencies = [
"gimli",
"gimli 0.31.0",
]
[[package]]
@ -263,9 +263,9 @@ checksum = "34cd60c5e3152cef0a592f1b296f1cc93715d89d2551d85315828c3a09575ff4"
[[package]]
name = "anyhow"
version = "1.0.86"
version = "1.0.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6"
[[package]]
name = "approx"
@ -402,7 +402,9 @@ dependencies = [
"indoc",
"language",
"language_model",
"languages",
"log",
"lsp",
"markdown",
"menu",
"multi_buffer",
@ -436,6 +438,7 @@ dependencies = [
"text",
"theme",
"toml 0.8.19",
"tree-sitter-md",
"ui",
"unindent",
"util",
@ -877,10 +880,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de"
[[package]]
name = "async-trait"
version = "0.1.81"
name = "async-tls"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107"
checksum = "cfeefd0ca297cbbb3bd34fd6b228401c2a5177038257afd751bc29f0a2da4795"
dependencies = [
"futures-core",
"futures-io",
"rustls 0.20.9",
"rustls-pemfile 1.0.4",
"webpki",
"webpki-roots 0.22.6",
]
[[package]]
name = "async-trait"
version = "0.1.82"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1"
dependencies = [
"proc-macro2",
"quote",
@ -893,8 +910,8 @@ version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1e9efbe14612da0a19fb983059a0b621e9cf6225d7018ecab4f9988215540dc"
dependencies = [
"async-native-tls",
"async-std",
"async-tls",
"futures-io",
"futures-util",
"log",
@ -981,7 +998,6 @@ dependencies = [
"editor",
"gpui",
"http_client",
"isahc",
"log",
"markdown_preview",
"menu",
@ -1049,7 +1065,7 @@ dependencies = [
"fastrand 2.1.1",
"hex",
"http 0.2.12",
"ring",
"ring 0.17.8",
"time",
"tokio",
"tracing",
@ -1218,7 +1234,7 @@ dependencies = [
"once_cell",
"p256",
"percent-encoding",
"ring",
"ring 0.17.8",
"sha2",
"subtle",
"time",
@ -1331,7 +1347,7 @@ dependencies = [
"once_cell",
"pin-project-lite",
"pin-utils",
"rustls",
"rustls 0.21.12",
"tokio",
"tracing",
]
@ -1478,17 +1494,17 @@ dependencies = [
[[package]]
name = "backtrace"
version = "0.3.73"
version = "0.3.74"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a"
checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a"
dependencies = [
"addr2line",
"cc",
"cfg-if",
"libc",
"miniz_oxide 0.7.4",
"miniz_oxide 0.8.0",
"object",
"rustc-demangle",
"windows-targets 0.52.6",
]
[[package]]
@ -2267,9 +2283,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.16"
version = "4.5.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019"
checksum = "3e5a21b8495e732f1b3c364c9949b201ca7bae518c502c80256c96ad79eaf6ac"
dependencies = [
"clap_builder",
"clap_derive",
@ -2277,9 +2293,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.15"
version = "4.5.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6"
checksum = "8cf2dd12af7a047ad9d6da2b6b249759a22a7abc0f474c1dae1777afa4b21a73"
dependencies = [
"anstream",
"anstyle",
@ -2405,6 +2421,8 @@ dependencies = [
"rand 0.8.5",
"release_channel",
"rpc",
"rustls 0.20.9",
"rustls-native-certs 0.8.0",
"schemars",
"serde",
"serde_json",
@ -2553,6 +2571,7 @@ dependencies = [
"http_client",
"hyper",
"indoc",
"isahc_http_client",
"jsonwebtoken",
"language",
"language_model",
@ -3065,7 +3084,7 @@ dependencies = [
"cranelift-control",
"cranelift-entity",
"cranelift-isle",
"gimli",
"gimli 0.29.0",
"hashbrown 0.14.5",
"log",
"regalloc2",
@ -4015,6 +4034,7 @@ dependencies = [
"git",
"gpui",
"http_client",
"isahc_http_client",
"language",
"languages",
"node_runtime",
@ -4110,6 +4130,7 @@ dependencies = [
"http_client",
"indexed_docs",
"isahc",
"isahc_http_client",
"language",
"log",
"lsp",
@ -4148,7 +4169,7 @@ dependencies = [
"env_logger",
"extension",
"fs",
"http_client",
"isahc_http_client",
"language",
"log",
"rpc",
@ -4306,6 +4327,7 @@ dependencies = [
"ctor",
"editor",
"env_logger",
"file_icons",
"futures 0.3.30",
"fuzzy",
"gpui",
@ -4313,7 +4335,9 @@ dependencies = [
"menu",
"picker",
"project",
"schemars",
"serde",
"serde_derive",
"serde_json",
"settings",
"text",
@ -4395,7 +4419,7 @@ dependencies = [
"futures-core",
"futures-sink",
"nanorand",
"spin",
"spin 0.9.8",
]
[[package]]
@ -4853,6 +4877,12 @@ dependencies = [
"stable_deref_trait",
]
[[package]]
name = "gimli"
version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64"
[[package]]
name = "git"
version = "0.1.0"
@ -4904,7 +4934,6 @@ dependencies = [
"git",
"gpui",
"http_client",
"isahc",
"pretty_assertions",
"regex",
"serde",
@ -4921,9 +4950,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "globset"
version = "0.4.14"
version = "0.4.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1"
checksum = "15f1ce686646e7f1e19bf7d5533fe443a45dbfb990e00629110797578b42fb19"
dependencies = [
"aho-corasick",
"bstr",
@ -5537,12 +5566,11 @@ dependencies = [
"anyhow",
"derive_more",
"futures 0.3.30",
"futures-lite 1.13.0",
"http 1.1.0",
"isahc",
"http 0.2.12",
"log",
"serde",
"serde_json",
"smol",
"url",
]
@ -5604,8 +5632,8 @@ dependencies = [
"http 0.2.12",
"hyper",
"log",
"rustls",
"rustls-native-certs",
"rustls 0.21.12",
"rustls-native-certs 0.6.3",
"tokio",
"tokio-rustls",
]
@ -5664,9 +5692,9 @@ dependencies = [
[[package]]
name = "ignore"
version = "0.4.22"
version = "0.4.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1"
checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b"
dependencies = [
"crossbeam-deque",
"globset",
@ -6017,6 +6045,17 @@ dependencies = [
"waker-fn",
]
[[package]]
name = "isahc_http_client"
version = "0.1.0"
dependencies = [
"anyhow",
"futures 0.3.30",
"http_client",
"isahc",
"util",
]
[[package]]
name = "itertools"
version = "0.10.5"
@ -6121,7 +6160,7 @@ dependencies = [
"base64 0.21.7",
"js-sys",
"pem",
"ring",
"ring 0.17.8",
"serde",
"serde_json",
"simple_asn1",
@ -6250,6 +6289,7 @@ dependencies = [
"http_client",
"image",
"inline_completion_button",
"isahc",
"language",
"log",
"menu",
@ -6372,7 +6412,7 @@ version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
dependencies = [
"spin",
"spin 0.9.8",
]
[[package]]
@ -6439,7 +6479,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
dependencies = [
"cfg-if",
"windows-targets 0.52.6",
"windows-targets 0.48.5",
]
[[package]]
@ -7016,7 +7056,6 @@ dependencies = [
"ctor",
"env_logger",
"futures 0.3.30",
"git",
"gpui",
"itertools 0.13.0",
"language",
@ -7483,7 +7522,6 @@ dependencies = [
"anyhow",
"futures 0.3.30",
"http_client",
"isahc",
"schemars",
"serde",
"serde_json",
@ -9175,7 +9213,7 @@ dependencies = [
"once_cell",
"percent-encoding",
"pin-project-lite",
"rustls-pemfile",
"rustls-pemfile 1.0.4",
"serde",
"serde_json",
"serde_urlencoded",
@ -9239,6 +9277,21 @@ dependencies = [
"util",
]
[[package]]
name = "ring"
version = "0.16.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc"
dependencies = [
"cc",
"libc",
"once_cell",
"spin 0.5.2",
"untrusted 0.7.1",
"web-sys",
"winapi",
]
[[package]]
name = "ring"
version = "0.17.8"
@ -9249,8 +9302,8 @@ dependencies = [
"cfg-if",
"getrandom 0.2.15",
"libc",
"spin",
"untrusted",
"spin 0.9.8",
"untrusted 0.9.0",
"windows-sys 0.52.0",
]
@ -9406,7 +9459,7 @@ dependencies = [
"futures 0.3.30",
"glob",
"rand 0.8.5",
"ring",
"ring 0.17.8",
"serde",
"serde_json",
"shellexpand 3.1.0",
@ -9527,6 +9580,18 @@ dependencies = [
"rustix 0.38.35",
]
[[package]]
name = "rustls"
version = "0.20.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99"
dependencies = [
"log",
"ring 0.16.20",
"sct",
"webpki",
]
[[package]]
name = "rustls"
version = "0.21.12"
@ -9534,7 +9599,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e"
dependencies = [
"log",
"ring",
"ring 0.17.8",
"rustls-webpki",
"sct",
]
@ -9546,7 +9611,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00"
dependencies = [
"openssl-probe",
"rustls-pemfile",
"rustls-pemfile 1.0.4",
"schannel",
"security-framework",
]
[[package]]
name = "rustls-native-certs"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcaf18a4f2be7326cd874a5fa579fae794320a0f388d365dca7e480e55f83f8a"
dependencies = [
"openssl-probe",
"rustls-pemfile 2.1.3",
"rustls-pki-types",
"schannel",
"security-framework",
]
@ -9560,14 +9638,30 @@ dependencies = [
"base64 0.21.7",
]
[[package]]
name = "rustls-pemfile"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425"
dependencies = [
"base64 0.22.1",
"rustls-pki-types",
]
[[package]]
name = "rustls-pki-types"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0"
[[package]]
name = "rustls-webpki"
version = "0.101.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765"
dependencies = [
"ring",
"untrusted",
"ring 0.17.8",
"untrusted 0.9.0",
]
[[package]]
@ -9681,8 +9775,8 @@ version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414"
dependencies = [
"ring",
"untrusted",
"ring 0.17.8",
"untrusted 0.9.0",
]
[[package]]
@ -9878,6 +9972,7 @@ dependencies = [
"gpui",
"heed",
"http_client",
"isahc_http_client",
"language",
"language_model",
"languages",
@ -10437,6 +10532,12 @@ dependencies = [
"smallvec",
]
[[package]]
name = "spin"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
[[package]]
name = "spin"
version = "0.9.8"
@ -10559,8 +10660,8 @@ dependencies = [
"paste",
"percent-encoding",
"rust_decimal",
"rustls",
"rustls-pemfile",
"rustls 0.21.12",
"rustls-pemfile 1.0.4",
"serde",
"serde_json",
"sha2",
@ -10573,7 +10674,7 @@ dependencies = [
"tracing",
"url",
"uuid",
"webpki-roots",
"webpki-roots 0.25.4",
]
[[package]]
@ -11705,7 +11806,7 @@ version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081"
dependencies = [
"rustls",
"rustls 0.21.12",
"tokio",
]
@ -12232,7 +12333,6 @@ dependencies = [
"http 0.2.12",
"httparse",
"log",
"native-tls",
"rand 0.8.5",
"sha1",
"thiserror",
@ -12417,6 +12517,12 @@ version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c"
[[package]]
name = "untrusted"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
[[package]]
name = "untrusted"
version = "0.9.0"
@ -13012,7 +13118,7 @@ dependencies = [
"cranelift-frontend",
"cranelift-native",
"cranelift-wasm",
"gimli",
"gimli 0.29.0",
"log",
"object",
"target-lexicon",
@ -13032,7 +13138,7 @@ dependencies = [
"cpp_demangle",
"cranelift-bitset",
"cranelift-entity",
"gimli",
"gimli 0.29.0",
"indexmap 2.4.0",
"log",
"object",
@ -13146,7 +13252,7 @@ checksum = "2a25199625effa4c13dd790d64bd56884b014c69829431bfe43991c740bd5bc1"
dependencies = [
"anyhow",
"cranelift-codegen",
"gimli",
"gimli 0.29.0",
"object",
"target-lexicon",
"wasmparser 0.215.0",
@ -13271,6 +13377,25 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "webpki"
version = "0.22.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53"
dependencies = [
"ring 0.17.8",
"untrusted 0.9.0",
]
[[package]]
name = "webpki-roots"
version = "0.22.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87"
dependencies = [
"webpki",
]
[[package]]
name = "webpki-roots"
version = "0.25.4"
@ -13407,7 +13532,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys 0.59.0",
"windows-sys 0.48.0",
]
[[package]]
@ -13424,7 +13549,7 @@ checksum = "073efe897d9ead7fc609874f94580afc831114af5149b6a90ee0a3a39b497fe0"
dependencies = [
"anyhow",
"cranelift-codegen",
"gimli",
"gimli 0.29.0",
"regalloc2",
"smallvec",
"target-lexicon",
@ -13981,6 +14106,7 @@ dependencies = [
"parking_lot",
"postage",
"project",
"remote",
"schemars",
"serde",
"serde_json",
@ -14260,7 +14386,7 @@ dependencies = [
[[package]]
name = "zed"
version = "0.154.0"
version = "0.155.0"
dependencies = [
"activity_indicator",
"anyhow",
@ -14305,6 +14431,7 @@ dependencies = [
"inline_completion_button",
"install_cli",
"isahc",
"isahc_http_client",
"journal",
"language",
"language_model",

View File

@ -52,6 +52,7 @@ members = [
"crates/indexed_docs",
"crates/inline_completion_button",
"crates/install_cli",
"crates/isahc_http_client",
"crates/journal",
"crates/language",
"crates/language_model",
@ -226,6 +227,7 @@ image_viewer = { path = "crates/image_viewer" }
indexed_docs = { path = "crates/indexed_docs" }
inline_completion_button = { path = "crates/inline_completion_button" }
install_cli = { path = "crates/install_cli" }
isahc_http_client = { path = "crates/isahc_http_client" }
journal = { path = "crates/journal" }
language = { path = "crates/language" }
language_model = { path = "crates/language_model" }
@ -394,6 +396,8 @@ runtimelib = { version = "0.15", default-features = false, features = [
] }
rustc-demangle = "0.1.23"
rust-embed = { version = "8.4", features = ["include-exclude"] }
rustls = "0.20.3"
rustls-native-certs = "0.8.0"
schemars = { version = "0.8", features = ["impl_json_schema"] }
semver = "1.0"
serde = { version = "1.0", features = ["derive", "rc"] }

View File

@ -166,6 +166,7 @@
{
"context": "AssistantPanel",
"bindings": {
"ctrl-k c": "assistant::CopyCode",
"ctrl-g": "search::SelectNextMatch",
"ctrl-shift-g": "search::SelectPrevMatch",
"alt-m": "assistant::ToggleModelSelector",
@ -519,6 +520,13 @@
"alt-enter": "editor::Newline"
}
},
{
"context": "PromptEditor",
"bindings": {
"ctrl-[": "assistant::CyclePreviousInlineAssist",
"ctrl-]": "assistant::CycleNextInlineAssist"
}
},
{
"context": "ProjectSearchBar && !in_replace",
"bindings": {

View File

@ -188,6 +188,7 @@
{
"context": "AssistantPanel",
"bindings": {
"cmd-k c": "assistant::CopyCode",
"cmd-g": "search::SelectNextMatch",
"cmd-shift-g": "search::SelectPrevMatch",
"alt-m": "assistant::ToggleModelSelector",
@ -526,6 +527,13 @@
"ctrl-enter": "assistant::InlineAssist"
}
},
{
"context": "PromptEditor",
"bindings": {
"ctrl-[": "assistant::CyclePreviousInlineAssist",
"ctrl-]": "assistant::CycleNextInlineAssist"
}
},
{
"context": "ProjectSearchBar && !in_replace",
"bindings": {

View File

@ -124,7 +124,6 @@
"g i": "vim::InsertAtPrevious",
"g ,": "vim::ChangeListNewer",
"g ;": "vim::ChangeListOlder",
"g q": "editor::Rewrap",
"shift-h": "vim::WindowTop",
"shift-m": "vim::WindowMiddle",
"shift-l": "vim::WindowBottom",
@ -240,6 +239,8 @@
"g shift-u": ["vim::PushOperator", "Uppercase"],
"g ~": ["vim::PushOperator", "OppositeCase"],
"\"": ["vim::PushOperator", "Register"],
"g q": ["vim::PushOperator", "Rewrap"],
"g w": ["vim::PushOperator", "Rewrap"],
"q": "vim::ToggleRecord",
"shift-q": "vim::ReplayLastRecording",
"@": ["vim::PushOperator", "ReplayRegister"],
@ -301,6 +302,7 @@
"i": ["vim::PushOperator", { "Object": { "around": false } }],
"a": ["vim::PushOperator", { "Object": { "around": true } }],
"g c": "vim::ToggleComments",
"g q": "vim::Rewrap",
"\"": ["vim::PushOperator", "Register"],
// tree-sitter related commands
"[ x": "editor::SelectLargerSyntaxNode",
@ -428,6 +430,15 @@
"~": "vim::CurrentLine"
}
},
{
"context": "vim_operator == gq",
"bindings": {
"g q": "vim::CurrentLine",
"q": "vim::CurrentLine",
"g w": "vim::CurrentLine",
"w": "vim::CurrentLine"
}
},
{
"context": "vim_operator == y",
"bindings": {

View File

@ -47,6 +47,17 @@ And here's the section to rewrite based on that prompt again for reference:
<rewrite_this>
{{{rewrite_section}}}
</rewrite_this>
{{#if diagnostic_errors}}
{{#each diagnostic_errors}}
<diagnostic_error>
<line_number>{{line_number}}</line_number>
<error_message>{{error_message}}</error_message>
<code_content>{{code_content}}</code_content>
</diagnostic_error>
{{/each}}
{{/if}}
{{/if}}
Only make changes that are necessary to fulfill the prompt, leave everything else as-is. All surrounding {{content_type}} will be preserved.

View File

@ -0,0 +1,8 @@
A software developer is asking a question about their project. The source files in their project have been indexed into a database of semantic text embeddings.
Your task is to generate a list of 4 diverse search queries that can be run on this embedding database, in order to retrieve a list of code snippets
that are relevant to the developer's question. Redundant search queries will be heavily penalized, so only include another query if it's sufficiently
distinct from previous ones.
Here is the question that's been asked, together with context that the developer has added manually:
{{{context_buffer}}}

View File

@ -15,9 +15,11 @@
// text editor:
//
// 1. "VSCode"
// 2. "JetBrains"
// 3. "SublimeText"
// 4. "Atom"
// 2. "Atom"
// 3. "JetBrains"
// 4. "None"
// 5. "SublimeText"
// 6. "TextMate"
"base_keymap": "VSCode",
// Features that can be globally enabled or disabled
"features": {
@ -318,6 +320,10 @@
"show_parameter_hints": true,
// Corresponds to null/None LSP hint type value.
"show_other_hints": true,
// Whether to show a background for inlay hints.
//
// If set to `true`, the background will use the `hint.background` color from the current theme.
"show_background": false,
// Time to wait after editing the buffer, before requesting the hints,
// set to 0 to disable debouncing.
"edit_debounce_ms": 700,
@ -492,6 +498,11 @@
// Whether a preview tab gets replaced when code navigation is used to navigate away from the tab.
"enable_preview_from_code_navigation": false
},
// Settings related to the file finder.
"file_finder": {
// Whether to show file icons in the file finder.
"file_icons": true
},
// Whether or not to remove any trailing whitespace from lines of a buffer
// before saving it.
"remove_trailing_whitespace_on_save": true,
@ -1025,7 +1036,7 @@
// environment variables.
//
// Examples:
// - "proxy": "socks5://localhost:10808"
// - "proxy": "socks5h://localhost:10808"
// - "proxy": "http://127.0.0.1:10809"
"proxy": null,
// Set to configure aliases for the command palette.

View File

@ -19,7 +19,10 @@ use workspace::{item::ItemHandle, StatusItemView, Workspace};
actions!(activity_indicator, [ShowErrorMessage]);
pub enum Event {
ShowError { lsp_name: Arc<str>, error: String },
ShowError {
lsp_name: LanguageServerName,
error: String,
},
}
pub struct ActivityIndicator {
@ -123,7 +126,7 @@ impl ActivityIndicator {
self.statuses.retain(|status| {
if let LanguageServerBinaryStatus::Failed { error } = &status.status {
cx.emit(Event::ShowError {
lsp_name: status.name.0.clone(),
lsp_name: status.name.clone(),
error: error.clone(),
});
false

View File

@ -49,6 +49,7 @@ pub enum Model {
/// Indicates whether this custom model supports caching.
cache_configuration: Option<AnthropicModelCacheConfiguration>,
max_output_tokens: Option<u32>,
default_temperature: Option<f32>,
},
}
@ -124,6 +125,19 @@ impl Model {
}
}
pub fn default_temperature(&self) -> f32 {
match self {
Self::Claude3_5Sonnet
| Self::Claude3Opus
| Self::Claude3Sonnet
| Self::Claude3Haiku => 1.0,
Self::Custom {
default_temperature,
..
} => default_temperature.unwrap_or(1.0),
}
}
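// A minimal sketch (not part of this diff) of a possible call site: prefer an
// explicit per-request temperature and fall back to the model default,
// assuming the request's `temperature` field is an `Option<f32>` as elsewhere
// in this change:
//
//     let temperature = request.temperature.unwrap_or_else(|| model.default_temperature());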
pub fn tool_model_id(&self) -> &str {
if let Self::Custom {
tool_override: Some(tool_override),

View File

@ -51,6 +51,7 @@ indoc.workspace = true
language.workspace = true
language_model.workspace = true
log.workspace = true
lsp.workspace = true
markdown.workspace = true
menu.workspace = true
multi_buffer.workspace = true
@ -94,9 +95,11 @@ editor = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
language = { workspace = true, features = ["test-support"] }
language_model = { workspace = true, features = ["test-support"] }
languages = { workspace = true, features = ["test-support"] }
log.workspace = true
project = { workspace = true, features = ["test-support"] }
rand.workspace = true
serde_json_lenient.workspace = true
text = { workspace = true, features = ["test-support"] }
tree-sitter-md.workspace = true
unindent.workspace = true

View File

@ -41,9 +41,10 @@ use semantic_index::{CloudEmbeddingProvider, SemanticDb};
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsStore};
use slash_command::{
auto_command, context_server_command, default_command, delta_command, diagnostics_command,
docs_command, fetch_command, file_command, now_command, project_command, prompt_command,
search_command, symbols_command, tab_command, terminal_command, workflow_command,
auto_command, cargo_workspace_command, context_server_command, default_command, delta_command,
diagnostics_command, docs_command, fetch_command, file_command, now_command, project_command,
prompt_command, search_command, symbols_command, tab_command, terminal_command,
workflow_command,
};
use std::path::PathBuf;
use std::sync::Arc;
@ -58,6 +59,7 @@ actions!(
[
Assist,
Split,
CopyCode,
CycleMessageRole,
QuoteSelection,
InsertIntoEditor,
@ -68,6 +70,8 @@ actions!(
ConfirmCommand,
NewContext,
ToggleModelSelector,
CycleNextInlineAssist,
CyclePreviousInlineAssist
]
);
@ -358,8 +362,19 @@ fn update_active_language_model_from_settings(cx: &mut AppContext) {
let settings = AssistantSettings::get_global(cx);
let provider_name = LanguageModelProviderId::from(settings.default_model.provider.clone());
let model_id = LanguageModelId::from(settings.default_model.model.clone());
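// Translate each configured inline alternative into the (provider, model)
// pair that the registry expects.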
let inline_alternatives = settings
.inline_alternatives
.iter()
.map(|alternative| {
(
LanguageModelProviderId::from(alternative.provider.clone()),
LanguageModelId::from(alternative.model.clone()),
)
})
.collect::<Vec<_>>();
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry.select_active_model(&provider_name, &model_id, cx);
registry.select_inline_alternative_models(inline_alternatives, cx);
});
}
@ -370,20 +385,33 @@ fn register_slash_commands(prompt_builder: Option<Arc<PromptBuilder>>, cx: &mut
slash_command_registry.register_command(delta_command::DeltaSlashCommand, true);
slash_command_registry.register_command(symbols_command::OutlineSlashCommand, true);
slash_command_registry.register_command(tab_command::TabSlashCommand, true);
slash_command_registry.register_command(project_command::ProjectSlashCommand, true);
slash_command_registry
.register_command(cargo_workspace_command::CargoWorkspaceSlashCommand, true);
slash_command_registry.register_command(prompt_command::PromptSlashCommand, true);
slash_command_registry.register_command(default_command::DefaultSlashCommand, false);
slash_command_registry.register_command(terminal_command::TerminalSlashCommand, true);
slash_command_registry.register_command(now_command::NowSlashCommand, false);
slash_command_registry.register_command(diagnostics_command::DiagnosticsSlashCommand, true);
slash_command_registry.register_command(fetch_command::FetchSlashCommand, false);
if let Some(prompt_builder) = prompt_builder {
slash_command_registry.register_command(
workflow_command::WorkflowSlashCommand::new(prompt_builder.clone()),
true,
);
cx.observe_flag::<project_command::ProjectSlashCommandFeatureFlag, _>({
let slash_command_registry = slash_command_registry.clone();
move |is_enabled, _cx| {
if is_enabled {
slash_command_registry.register_command(
project_command::ProjectSlashCommand::new(prompt_builder.clone()),
true,
);
}
}
})
.detach();
}
slash_command_registry.register_command(fetch_command::FetchSlashCommand, false);
cx.observe_flag::<auto_command::AutoSlashCommandFeatureFlag, _>({
let slash_command_registry = slash_command_registry.clone();
@ -421,10 +449,12 @@ fn update_slash_commands_from_settings(cx: &mut AppContext) {
slash_command_registry.unregister_command(docs_command::DocsSlashCommand);
}
if settings.project.enabled {
slash_command_registry.register_command(project_command::ProjectSlashCommand, true);
if settings.cargo_workspace.enabled {
slash_command_registry
.register_command(cargo_workspace_command::CargoWorkspaceSlashCommand, true);
} else {
slash_command_registry.unregister_command(project_command::ProjectSlashCommand);
slash_command_registry
.unregister_command(cargo_workspace_command::CargoWorkspaceSlashCommand);
}
}

View File

@ -12,11 +12,11 @@ use crate::{
slash_command_picker,
terminal_inline_assistant::TerminalInlineAssistant,
Assist, CacheStatus, ConfirmCommand, Content, Context, ContextEvent, ContextId, ContextStore,
ContextStoreEvent, CycleMessageRole, DeployHistory, DeployPromptLibrary, InlineAssistId,
InlineAssistant, InsertDraggedFiles, InsertIntoEditor, Message, MessageId, MessageMetadata,
MessageStatus, ModelPickerDelegate, ModelSelector, NewContext, PendingSlashCommand,
PendingSlashCommandStatus, QuoteSelection, RemoteContextMetadata, SavedContextMetadata, Split,
ToggleFocus, ToggleModelSelector, WorkflowStepResolution,
ContextStoreEvent, CopyCode, CycleMessageRole, DeployHistory, DeployPromptLibrary,
InlineAssistId, InlineAssistant, InsertDraggedFiles, InsertIntoEditor, Message, MessageId,
MessageMetadata, MessageStatus, ModelPickerDelegate, ModelSelector, NewContext,
PendingSlashCommand, PendingSlashCommandStatus, QuoteSelection, RemoteContextMetadata,
SavedContextMetadata, Split, ToggleFocus, ToggleModelSelector, WorkflowStepResolution,
};
use anyhow::{anyhow, Result};
use assistant_slash_command::{SlashCommand, SlashCommandOutputSection};
@ -45,7 +45,8 @@ use gpui::{
};
use indexed_docs::IndexedDocsStore;
use language::{
language_settings::SoftWrap, Capability, LanguageRegistry, LspAdapterDelegate, Point, ToOffset,
language_settings::SoftWrap, BufferSnapshot, Capability, LanguageRegistry, LspAdapterDelegate,
ToOffset,
};
use language_model::{
provider::cloud::PROVIDER_ID, LanguageModelProvider, LanguageModelProviderId,
@ -56,6 +57,7 @@ use multi_buffer::MultiBufferRow;
use picker::{Picker, PickerDelegate};
use project::lsp_store::LocalLspAdapterDelegate;
use project::{Project, Worktree};
use rope::Point;
use search::{buffer_search::DivRegistrar, BufferSearchBar};
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings};
@ -81,9 +83,10 @@ use util::{maybe, ResultExt};
use workspace::{
dock::{DockPosition, Panel, PanelEvent},
item::{self, FollowableItem, Item, ItemHandle},
notifications::NotificationId,
pane::{self, SaveIntent},
searchable::{SearchEvent, SearchableItem},
DraggedSelection, Pane, Save, ShowConfiguration, ToggleZoom, ToolbarItemEvent,
DraggedSelection, Pane, Save, ShowConfiguration, Toast, ToggleZoom, ToolbarItemEvent,
ToolbarItemLocation, ToolbarItemView, Workspace,
};
use workspace::{searchable::SearchableItemHandle, DraggedTab};
@ -105,6 +108,7 @@ pub fn init(cx: &mut AppContext) {
.register_action(AssistantPanel::inline_assist)
.register_action(ContextEditor::quote_selection)
.register_action(ContextEditor::insert_selection)
.register_action(ContextEditor::copy_code)
.register_action(ContextEditor::insert_dragged_files)
.register_action(AssistantPanel::show_configuration)
.register_action(AssistantPanel::create_new_context);
@ -2810,9 +2814,8 @@ impl ContextEditor {
} else {
// If there are multiple buffers or suggestion groups, create a multibuffer
let multibuffer = cx.new_model(|cx| {
let replica_id = project.read(cx).replica_id();
let mut multibuffer = MultiBuffer::new(replica_id, Capability::ReadWrite)
.with_title(resolved_step.title.clone());
let mut multibuffer =
MultiBuffer::new(Capability::ReadWrite).with_title(resolved_step.title.clone());
for (buffer, groups) in &resolved_step.suggestion_groups {
let excerpt_ids = multibuffer.push_excerpts(
buffer.clone(),
@ -3100,6 +3103,49 @@ impl ContextEditor {
});
}
/// Returns either the selected text, or the content of the Markdown code
/// block surrounding the cursor.
fn get_selection_or_code_block(
context_editor_view: &View<ContextEditor>,
cx: &mut ViewContext<Workspace>,
) -> Option<(String, bool)> {
const CODE_FENCE_DELIMITER: &'static str = "```";
let context_editor = context_editor_view.read(cx).editor.read(cx);
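// With an empty selection, fall back to the fenced code block that
// surrounds the cursor.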
if context_editor.selections.newest::<Point>(cx).is_empty() {
let snapshot = context_editor.buffer().read(cx).snapshot(cx);
let (_, _, snapshot) = snapshot.as_singleton()?;
let head = context_editor.selections.newest::<Point>(cx).head();
let offset = snapshot.point_to_offset(head);
let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?;
let mut text = snapshot
.text_for_range(surrounding_code_block_range)
.collect::<String>();
// If there is no newline trailing the closing three-backticks, then
// tree-sitter-md extends the range of the content node to include
// the backticks.
if text.ends_with(CODE_FENCE_DELIMITER) {
text.drain((text.len() - CODE_FENCE_DELIMITER.len())..);
}
(!text.is_empty()).then_some((text, true))
} else {
let anchor = context_editor.selections.newest_anchor();
let text = context_editor
.buffer()
.read(cx)
.read(cx)
.text_for_range(anchor.range())
.collect::<String>();
(!text.is_empty()).then_some((text, false))
}
}
fn insert_selection(
workspace: &mut Workspace,
_: &InsertIntoEditor,
@ -3118,17 +3164,7 @@ impl ContextEditor {
return;
};
let context_editor = context_editor_view.read(cx).editor.read(cx);
let anchor = context_editor.selections.newest_anchor();
let text = context_editor
.buffer()
.read(cx)
.read(cx)
.text_for_range(anchor.range())
.collect::<String>();
// If nothing is selected, don't delete the current selection; instead, be a no-op.
if !text.is_empty() {
if let Some((text, _)) = Self::get_selection_or_code_block(&context_editor_view, cx) {
active_editor_view.update(cx, |editor, cx| {
editor.insert(&text, cx);
editor.focus(cx);
@ -3136,6 +3172,36 @@ impl ContextEditor {
}
}
fn copy_code(workspace: &mut Workspace, _: &CopyCode, cx: &mut ViewContext<Workspace>) {
let result = maybe!({
let panel = workspace.panel::<AssistantPanel>(cx)?;
let context_editor_view = panel.read(cx).active_context_editor(cx)?;
Self::get_selection_or_code_block(&context_editor_view, cx)
});
let Some((text, is_code_block)) = result else {
return;
};
cx.write_to_clipboard(ClipboardItem::new_string(text));
struct CopyToClipboardToast;
workspace.show_toast(
Toast::new(
NotificationId::unique::<CopyToClipboardToast>(),
format!(
"{} copied to clipboard.",
if is_code_block {
"Code block"
} else {
"Selection"
}
),
)
.autohide(),
cx,
);
}
fn insert_dragged_files(
workspace: &mut Workspace,
action: &InsertDraggedFiles,
@ -3466,7 +3532,9 @@ impl ContextEditor {
for chunk in context.buffer().read(cx).text_for_range(range) {
text.push_str(chunk);
}
text.push('\n');
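// Push a separating newline between messages, but skip it after the final
// message so the copied text has no trailing newline.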
if message.offset_range.end < selection.range().end {
text.push('\n');
}
}
}
}
@ -4215,6 +4283,48 @@ impl ContextEditor {
}
}
/// Returns the contents of the *outermost* fenced code block that contains the given offset.
fn find_surrounding_code_block(snapshot: &BufferSnapshot, offset: usize) -> Option<Range<usize>> {
const CODE_BLOCK_NODE: &'static str = "fenced_code_block";
const CODE_BLOCK_CONTENT: &'static str = "code_fence_content";
let layer = snapshot.syntax_layers().next()?;
let root_node = layer.node();
let mut cursor = root_node.walk();
// Go to the first child for the given offset
while cursor.goto_first_child_for_byte(offset).is_some() {
// If we're at the end of the node, go to the next one.
// Example: if you have a fenced-code-block, and you're on the start of the line
// right after the closing ```, you want to skip the fenced-code-block and
// go to the next sibling.
if cursor.node().end_byte() == offset {
cursor.goto_next_sibling();
}
if cursor.node().start_byte() > offset {
break;
}
// We found the fenced code block.
if cursor.node().kind() == CODE_BLOCK_NODE {
// Now we need to find the child node that contains the code.
cursor.goto_first_child();
loop {
if cursor.node().kind() == CODE_BLOCK_CONTENT {
return Some(cursor.node().byte_range());
}
if !cursor.goto_next_sibling() {
break;
}
}
}
}
None
}
fn render_fold_icon_button(
editor: WeakView<Editor>,
icon: IconName,
@ -5497,3 +5607,85 @@ fn configuration_error(cx: &AppContext) -> Option<ConfigurationError> {
None
}
#[cfg(test)]
mod tests {
use super::*;
use gpui::{AppContext, Context};
use language::Buffer;
use unindent::Unindent;
#[gpui::test]
fn test_find_code_blocks(cx: &mut AppContext) {
let markdown = languages::language("markdown", tree_sitter_md::LANGUAGE.into());
let buffer = cx.new_model(|cx| {
let text = r#"
line 0
line 1
```rust
fn main() {}
```
line 5
line 6
line 7
```go
func main() {}
```
line 11
```
this is plain text code block
```
```go
func another() {}
```
line 19
"#
.unindent();
let mut buffer = Buffer::local(text, cx);
buffer.set_language(Some(markdown.clone()), cx);
buffer
});
let snapshot = buffer.read(cx).snapshot();
let code_blocks = vec![
Point::new(3, 0)..Point::new(4, 0),
Point::new(9, 0)..Point::new(10, 0),
Point::new(13, 0)..Point::new(14, 0),
Point::new(17, 0)..Point::new(18, 0),
]
.into_iter()
.map(|range| snapshot.point_to_offset(range.start)..snapshot.point_to_offset(range.end))
.collect::<Vec<_>>();
let expected_results = vec![
(0, None),
(1, None),
(2, Some(code_blocks[0].clone())),
(3, Some(code_blocks[0].clone())),
(4, Some(code_blocks[0].clone())),
(5, None),
(6, None),
(7, None),
(8, Some(code_blocks[1].clone())),
(9, Some(code_blocks[1].clone())),
(10, Some(code_blocks[1].clone())),
(11, None),
(12, Some(code_blocks[2].clone())),
(13, Some(code_blocks[2].clone())),
(14, Some(code_blocks[2].clone())),
(15, None),
(16, Some(code_blocks[3].clone())),
(17, Some(code_blocks[3].clone())),
(18, Some(code_blocks[3].clone())),
(19, None),
];
for (row, expected) in expected_results {
let offset = snapshot.point_to_offset(Point::new(row, 0));
let range = find_surrounding_code_block(&snapshot, offset);
assert_eq!(range, expected, "unexpected result on row {:?}", row);
}
}
}

View File

@ -59,6 +59,7 @@ pub struct AssistantSettings {
pub default_width: Pixels,
pub default_height: Pixels,
pub default_model: LanguageModelSelection,
pub inline_alternatives: Vec<LanguageModelSelection>,
pub using_outdated_settings_version: bool,
}
@ -236,6 +237,7 @@ impl AssistantSettingsContent {
})
}
}),
inline_alternatives: None,
},
VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
},
@ -254,6 +256,7 @@ impl AssistantSettingsContent {
.id()
.to_string(),
}),
inline_alternatives: None,
},
}
}
@ -369,6 +372,7 @@ impl Default for VersionedAssistantSettingsContent {
default_width: None,
default_height: None,
default_model: None,
inline_alternatives: None,
})
}
}
@ -397,6 +401,8 @@ pub struct AssistantSettingsContentV2 {
default_height: Option<f32>,
/// The default model to use when creating new contexts.
default_model: Option<LanguageModelSelection>,
/// Additional models with which to generate alternatives when performing inline assists.
inline_alternatives: Option<Vec<LanguageModelSelection>>,
}
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
@ -517,10 +523,8 @@ impl Settings for AssistantSettings {
&mut settings.default_height,
value.default_height.map(Into::into),
);
merge(
&mut settings.default_model,
value.default_model.map(Into::into),
);
merge(&mut settings.default_model, value.default_model);
merge(&mut settings.inline_alternatives, value.inline_alternatives);
// merge(&mut settings.infer_context, value.infer_context); TODO re-enable this once we ship context inference
}
@ -574,6 +578,7 @@ mod tests {
provider: "test-provider".into(),
model: "gpt-99".into(),
}),
inline_alternatives: None,
enabled: None,
button: None,
dock: None,

View File

@ -46,7 +46,7 @@ use std::{
sync::Arc,
time::{Duration, Instant},
};
use telemetry_events::AssistantKind;
use telemetry_events::{AssistantKind, AssistantPhase};
use text::BufferSnapshot;
use util::{post_inc, ResultExt, TryFutureExt};
use uuid::Uuid;
@ -683,7 +683,7 @@ impl Context {
buffer.set_text(saved_context.text.as_str(), cx)
});
let operations = saved_context.into_ops(&this.buffer, cx);
this.apply_ops(operations, cx).unwrap();
this.apply_ops(operations, cx);
this
}
@ -756,7 +756,7 @@ impl Context {
&mut self,
ops: impl IntoIterator<Item = ContextOperation>,
cx: &mut ModelContext<Self>,
) -> Result<()> {
) {
let mut buffer_ops = Vec::new();
for op in ops {
match op {
@ -765,10 +765,8 @@ impl Context {
}
}
self.buffer
.update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?;
.update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx));
self.flush_ops(cx);
Ok(())
}
fn flush_ops(&mut self, cx: &mut ModelContext<Context>) {
@ -1008,9 +1006,12 @@ impl Context {
cx: &mut ModelContext<Self>,
) {
match event {
language::BufferEvent::Operation(operation) => cx.emit(ContextEvent::Operation(
ContextOperation::BufferOperation(operation.clone()),
)),
language::BufferEvent::Operation {
operation,
is_local: true,
} => cx.emit(ContextEvent::Operation(ContextOperation::BufferOperation(
operation.clone(),
))),
language::BufferEvent::Edited => {
self.count_remaining_tokens(cx);
self.reparse(cx);
@ -1969,8 +1970,9 @@ impl Context {
}
pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
let provider = LanguageModelRegistry::read_global(cx).active_provider()?;
let model = LanguageModelRegistry::read_global(cx).active_model()?;
let model_registry = LanguageModelRegistry::read_global(cx);
let provider = model_registry.active_provider()?;
let model = model_registry.active_model()?;
let last_message_id = self.get_last_valid_message_id(cx)?;
if !provider.is_authenticated(cx) {
@ -2134,6 +2136,7 @@ impl Context {
telemetry.report_assistant_event(
Some(this.id.0.clone()),
AssistantKind::Panel,
AssistantPhase::Response,
model.telemetry_id(),
response_latency,
error_message,
@ -2181,7 +2184,7 @@ impl Context {
messages: Vec::new(),
tools: Vec::new(),
stop: Vec::new(),
temperature: 1.0,
temperature: None,
};
for message in self.messages(cx) {
if message.status != MessageStatus::Done {

View File

@ -1166,9 +1166,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std
);
network.lock().broadcast(replica_id, ops_to_send);
context
.update(cx, |context, cx| context.apply_ops(ops_to_receive, cx))
.unwrap();
context.update(cx, |context, cx| context.apply_ops(ops_to_receive, cx));
} else if rng.gen_bool(0.1) && replica_id != 0 {
log::info!("Context {}: disconnecting", context_index);
network.lock().disconnect_peer(replica_id);
@ -1180,9 +1178,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std
.map(ContextOperation::from_proto)
.collect::<Result<Vec<_>>>()
.unwrap();
context
.update(cx, |context, cx| context.apply_ops(ops, cx))
.unwrap();
context.update(cx, |context, cx| context.apply_ops(ops, cx));
}
}
}

View File

@ -223,7 +223,7 @@ impl ContextStore {
if let Some(context) = this.loaded_context_for_id(&context_id, cx) {
let operation_proto = envelope.payload.operation.context("invalid operation")?;
let operation = ContextOperation::from_proto(operation_proto)?;
context.update(cx, |context, cx| context.apply_ops([operation], cx))?;
context.update(cx, |context, cx| context.apply_ops([operation], cx));
}
Ok(())
})?
@ -394,7 +394,7 @@ impl ContextStore {
.collect::<Result<Vec<_>>>()
})
.await?;
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??;
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?;
this.update(&mut cx, |this, cx| {
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
existing_context
@ -531,7 +531,7 @@ impl ContextStore {
.collect::<Result<Vec<_>>>()
})
.await?;
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??;
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?;
this.update(&mut cx, |this, cx| {
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
existing_context

File diff suppressed because it is too large.

View File

@ -796,7 +796,7 @@ impl PromptLibrary {
}],
tools: Vec::new(),
stop: Vec::new(),
temperature: 1.,
temperature: None,
},
cx,
)
@ -921,10 +921,8 @@ impl PromptLibrary {
scrollbar_width: Pixels::ZERO,
syntax: cx.theme().syntax().clone(),
status: cx.theme().status().clone(),
inlay_hints_style: HighlightStyle {
color: Some(cx.theme().status().hint),
..HighlightStyle::default()
},
inlay_hints_style: editor::make_inlay_hints_style(cx),
suggestions_style: HighlightStyle {
color: Some(cx.theme().status().predictive),
..HighlightStyle::default()

View File

@ -4,13 +4,20 @@ use fs::Fs;
use futures::StreamExt;
use gpui::AssetSource;
use handlebars::{Handlebars, RenderError};
use language::{BufferSnapshot, LanguageName};
use language::{BufferSnapshot, LanguageName, Point};
use parking_lot::Mutex;
use serde::Serialize;
use std::{ops::Range, path::PathBuf, sync::Arc, time::Duration};
use text::LineEnding;
use util::ResultExt;
#[derive(Serialize)]
pub struct ContentPromptDiagnosticContext {
pub line_number: usize,
pub error_message: String,
pub code_content: String,
}
#[derive(Serialize)]
pub struct ContentPromptContext {
pub content_type: String,
@ -20,6 +27,7 @@ pub struct ContentPromptContext {
pub document_content: String,
pub user_prompt: String,
pub rewrite_section: Option<String>,
pub diagnostic_errors: Vec<ContentPromptDiagnosticContext>,
}
#[derive(Serialize)]
@ -32,6 +40,11 @@ pub struct TerminalAssistantPromptContext {
pub user_prompt: String,
}
#[derive(Serialize)]
pub struct ProjectSlashCommandPromptContext {
pub context_buffer: String,
}
/// Context required to generate a workflow step resolution prompt.
#[derive(Debug, Serialize)]
pub struct StepResolutionContext {
@ -82,10 +95,9 @@ impl PromptBuilder {
/// and application context.
/// * `handlebars` - An `Arc<Mutex<Handlebars>>` for registering and updating templates.
fn watch_fs_for_template_overrides(
mut params: PromptLoadingParams,
params: PromptLoadingParams,
handlebars: Arc<Mutex<Handlebars<'static>>>,
) {
params.repo_path = None;
let templates_dir = paths::prompt_overrides_dir(params.repo_path.as_deref());
params.cx.background_executor()
.spawn(async move {
@ -220,7 +232,8 @@ impl PromptBuilder {
let before_range = 0..range.start;
let truncated_before = if before_range.len() > MAX_CTX {
is_truncated = true;
range.start - MAX_CTX..range.start
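// Clip the truncated start to a valid offset so truncation never lands in
// the middle of a multi-byte character.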
let start = buffer.clip_offset(range.start - MAX_CTX, text::Bias::Right);
start..range.start
} else {
before_range
};
@ -228,7 +241,8 @@ impl PromptBuilder {
let after_range = range.end..buffer.len();
let truncated_after = if after_range.len() > MAX_CTX {
is_truncated = true;
range.end..range.end + MAX_CTX
let end = buffer.clip_offset(range.end + MAX_CTX, text::Bias::Left);
range.end..end
} else {
after_range
};
@ -259,6 +273,17 @@ impl PromptBuilder {
} else {
None
};
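// Collect the diagnostics overlapping the range being rewritten so the
// template can surface them as structured context.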
let diagnostics = buffer.diagnostics_in_range::<_, Point>(range, false);
let diagnostic_errors: Vec<ContentPromptDiagnosticContext> = diagnostics
.map(|entry| {
let start = entry.range.start;
ContentPromptDiagnosticContext {
line_number: (start.row + 1) as usize,
error_message: entry.diagnostic.message.clone(),
code_content: buffer.text_for_range(entry.range.clone()).collect(),
}
})
.collect();
let context = ContentPromptContext {
content_type: content_type.to_string(),
@ -268,8 +293,8 @@ impl PromptBuilder {
document_content,
user_prompt,
rewrite_section,
diagnostic_errors,
};
self.handlebars.lock().render("content_prompt", &context)
}
@ -297,4 +322,14 @@ impl PromptBuilder {
pub fn generate_workflow_prompt(&self) -> Result<String, RenderError> {
self.handlebars.lock().render("edit_workflow", &())
}
pub fn generate_project_slash_command_prompt(
&self,
context_buffer: String,
) -> Result<String, RenderError> {
self.handlebars.lock().render(
"project_slash_command",
&ProjectSlashCommandPromptContext { context_buffer },
)
}
}

View File

@ -18,8 +18,8 @@ use std::{
};
use ui::ActiveTheme;
use workspace::Workspace;
pub mod auto_command;
pub mod cargo_workspace_command;
pub mod context_server_command;
pub mod default_command;
pub mod delta_command;

View File

@ -216,7 +216,7 @@ async fn commands_for_summaries(
}],
tools: Vec::new(),
stop: Vec::new(),
temperature: 1.0,
temperature: None,
};
while let Some(current_summaries) = stack.pop() {

View File

@ -0,0 +1,153 @@
use super::{SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Context, Result};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use fs::Fs;
use gpui::{AppContext, Model, Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
use project::{Project, ProjectPath};
use std::{
fmt::Write,
path::Path,
sync::{atomic::AtomicBool, Arc},
};
use ui::prelude::*;
use workspace::Workspace;
pub(crate) struct CargoWorkspaceSlashCommand;
impl CargoWorkspaceSlashCommand {
async fn build_message(fs: Arc<dyn Fs>, path_to_cargo_toml: &Path) -> Result<String> {
let buffer = fs.load(path_to_cargo_toml).await?;
let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?;
let mut message = String::new();
writeln!(message, "You are in a Rust project.")?;
if let Some(workspace) = cargo_toml.workspace {
writeln!(
message,
"The project is a Cargo workspace with the following members:"
)?;
for member in workspace.members {
writeln!(message, "- {member}")?;
}
if !workspace.default_members.is_empty() {
writeln!(message, "The default members are:")?;
for member in workspace.default_members {
writeln!(message, "- {member}")?;
}
}
if !workspace.dependencies.is_empty() {
writeln!(
message,
"The following workspace dependencies are installed:"
)?;
for dependency in workspace.dependencies.keys() {
writeln!(message, "- {dependency}")?;
}
}
} else if let Some(package) = cargo_toml.package {
writeln!(
message,
"The project name is \"{name}\".",
name = package.name
)?;
let description = package
.description
.as_ref()
.and_then(|description| description.get().ok().cloned());
if let Some(description) = description.as_ref() {
writeln!(message, "It describes itself as \"{description}\".")?;
}
if !cargo_toml.dependencies.is_empty() {
writeln!(message, "The following dependencies are installed:")?;
for dependency in cargo_toml.dependencies.keys() {
writeln!(message, "- {dependency}")?;
}
}
}
Ok(message)
}
fn path_to_cargo_toml(project: Model<Project>, cx: &mut AppContext) -> Option<Arc<Path>> {
let worktree = project.read(cx).worktrees(cx).next()?;
let worktree = worktree.read(cx);
let entry = worktree.entry_for_path("Cargo.toml")?;
let path = ProjectPath {
worktree_id: worktree.id(),
path: entry.path.clone(),
};
Some(Arc::from(
project.read(cx).absolute_path(&path, cx)?.as_path(),
))
}
}
impl SlashCommand for CargoWorkspaceSlashCommand {
fn name(&self) -> String {
"cargo-workspace".into()
}
fn description(&self) -> String {
"insert project workspace metadata".into()
}
fn menu_text(&self) -> String {
"Insert Project Workspace Metadata".into()
}
fn complete_argument(
self: Arc<Self>,
_arguments: &[String],
_cancel: Arc<AtomicBool>,
_workspace: Option<WeakView<Workspace>>,
_cx: &mut WindowContext,
) -> Task<Result<Vec<ArgumentCompletion>>> {
Task::ready(Err(anyhow!("this command does not require an argument")))
}
fn requires_argument(&self) -> bool {
false
}
fn run(
self: Arc<Self>,
_arguments: &[String],
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
_context_buffer: BufferSnapshot,
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<Result<SlashCommandOutput>> {
let output = workspace.update(cx, |workspace, cx| {
let project = workspace.project().clone();
let fs = workspace.project().read(cx).fs().clone();
let path = Self::path_to_cargo_toml(project, cx);
let output = cx.background_executor().spawn(async move {
let path = path.with_context(|| "Cargo.toml not found")?;
Self::build_message(fs, &path).await
});
cx.foreground_executor().spawn(async move {
let text = output.await?;
let range = 0..text.len();
Ok(SlashCommandOutput {
text,
sections: vec![SlashCommandOutputSection {
range,
icon: IconName::FileTree,
label: "Project".into(),
metadata: None,
}],
run_commands_in_text: false,
})
})
});
output.unwrap_or_else(|error| Task::ready(Err(error)))
}
}

View File

@ -1,90 +1,39 @@
use super::{SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Context, Result};
use super::{
create_label_for_command, search_command::add_search_result_section, SlashCommand,
SlashCommandOutput,
};
use crate::PromptBuilder;
use anyhow::{anyhow, Result};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use fs::Fs;
use gpui::{AppContext, Model, Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
use project::{Project, ProjectPath};
use feature_flags::FeatureFlag;
use gpui::{AppContext, Task, WeakView, WindowContext};
use language::{Anchor, CodeLabel, LspAdapterDelegate};
use language_model::{LanguageModelRegistry, LanguageModelTool};
use schemars::JsonSchema;
use semantic_index::SemanticDb;
use serde::Deserialize;
pub struct ProjectSlashCommandFeatureFlag;
impl FeatureFlag for ProjectSlashCommandFeatureFlag {
const NAME: &'static str = "project-slash-command";
}
use std::{
fmt::Write,
path::Path,
fmt::Write as _,
ops::DerefMut,
sync::{atomic::AtomicBool, Arc},
};
use ui::prelude::*;
use ui::{BorrowAppContext as _, IconName};
use workspace::Workspace;
pub(crate) struct ProjectSlashCommand;
pub struct ProjectSlashCommand {
prompt_builder: Arc<PromptBuilder>,
}
impl ProjectSlashCommand {
async fn build_message(fs: Arc<dyn Fs>, path_to_cargo_toml: &Path) -> Result<String> {
let buffer = fs.load(path_to_cargo_toml).await?;
let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?;
let mut message = String::new();
writeln!(message, "You are in a Rust project.")?;
if let Some(workspace) = cargo_toml.workspace {
writeln!(
message,
"The project is a Cargo workspace with the following members:"
)?;
for member in workspace.members {
writeln!(message, "- {member}")?;
}
if !workspace.default_members.is_empty() {
writeln!(message, "The default members are:")?;
for member in workspace.default_members {
writeln!(message, "- {member}")?;
}
}
if !workspace.dependencies.is_empty() {
writeln!(
message,
"The following workspace dependencies are installed:"
)?;
for dependency in workspace.dependencies.keys() {
writeln!(message, "- {dependency}")?;
}
}
} else if let Some(package) = cargo_toml.package {
writeln!(
message,
"The project name is \"{name}\".",
name = package.name
)?;
let description = package
.description
.as_ref()
.and_then(|description| description.get().ok().cloned());
if let Some(description) = description.as_ref() {
writeln!(message, "It describes itself as \"{description}\".")?;
}
if !cargo_toml.dependencies.is_empty() {
writeln!(message, "The following dependencies are installed:")?;
for dependency in cargo_toml.dependencies.keys() {
writeln!(message, "- {dependency}")?;
}
}
}
Ok(message)
}
fn path_to_cargo_toml(project: Model<Project>, cx: &mut AppContext) -> Option<Arc<Path>> {
let worktree = project.read(cx).worktrees(cx).next()?;
let worktree = worktree.read(cx);
let entry = worktree.entry_for_path("Cargo.toml")?;
let path = ProjectPath {
worktree_id: worktree.id(),
path: entry.path.clone(),
};
Some(Arc::from(
project.read(cx).absolute_path(&path, cx)?.as_path(),
))
pub fn new(prompt_builder: Arc<PromptBuilder>) -> Self {
Self { prompt_builder }
}
}
@ -93,12 +42,20 @@ impl SlashCommand for ProjectSlashCommand {
"project".into()
}
fn label(&self, cx: &AppContext) -> CodeLabel {
create_label_for_command("project", &[], cx)
}
fn description(&self) -> String {
"insert project metadata".into()
"Generate semantic searches based on the current context".into()
}
fn menu_text(&self) -> String {
"Insert Project Metadata".into()
"Project Context".into()
}
fn requires_argument(&self) -> bool {
false
}
fn complete_argument(
@ -108,46 +65,126 @@ impl SlashCommand for ProjectSlashCommand {
_workspace: Option<WeakView<Workspace>>,
_cx: &mut WindowContext,
) -> Task<Result<Vec<ArgumentCompletion>>> {
Task::ready(Err(anyhow!("this command does not require an argument")))
}
fn requires_argument(&self) -> bool {
false
Task::ready(Ok(Vec::new()))
}
fn run(
self: Arc<Self>,
_arguments: &[String],
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
_context_buffer: BufferSnapshot,
_context_slash_command_output_sections: &[SlashCommandOutputSection<Anchor>],
context_buffer: language::BufferSnapshot,
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<Result<SlashCommandOutput>> {
let output = workspace.update(cx, |workspace, cx| {
let project = workspace.project().clone();
let fs = workspace.project().read(cx).fs().clone();
let path = Self::path_to_cargo_toml(project, cx);
let output = cx.background_executor().spawn(async move {
let path = path.with_context(|| "Cargo.toml not found")?;
Self::build_message(fs, &path).await
});
let model_registry = LanguageModelRegistry::read_global(cx);
let current_model = model_registry.active_model();
let prompt_builder = self.prompt_builder.clone();
cx.foreground_executor().spawn(async move {
let text = output.await?;
let range = 0..text.len();
Ok(SlashCommandOutput {
text,
sections: vec![SlashCommandOutputSection {
range,
icon: IconName::FileTree,
label: "Project".into(),
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
};
let project = workspace.read(cx).project().clone();
let fs = project.read(cx).fs().clone();
let Some(project_index) =
cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx))
else {
return Task::ready(Err(anyhow::anyhow!("no project indexer")));
};
cx.spawn(|mut cx| async move {
let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?;
let prompt =
prompt_builder.generate_project_slash_command_prompt(context_buffer.text())?;
let search_queries = current_model
.use_tool::<SearchQueries>(
language_model::LanguageModelRequest {
messages: vec![language_model::LanguageModelRequestMessage {
role: language_model::Role::User,
content: vec![language_model::MessageContent::Text(prompt)],
cache: false,
}],
tools: vec![],
stop: vec![],
temperature: None,
},
cx.deref_mut(),
)
.await?
.search_queries;
let results = project_index
.read_with(&cx, |project_index, cx| {
project_index.search(search_queries.clone(), 25, cx)
})?
.await?;
let results = SemanticDb::load_results(results, &fs, &cx).await?;
cx.background_executor()
.spawn(async move {
let mut output = "Project context:\n".to_string();
let mut sections = Vec::new();
for (ix, query) in search_queries.into_iter().enumerate() {
let start_ix = output.len();
writeln!(&mut output, "Results for {query}:").unwrap();
let mut has_results = false;
for result in &results {
if result.query_index == ix {
add_search_result_section(result, &mut output, &mut sections);
has_results = true;
}
}
if has_results {
sections.push(SlashCommandOutputSection {
range: start_ix..output.len(),
icon: IconName::MagnifyingGlass,
label: query.into(),
metadata: None,
});
output.push('\n');
} else {
output.truncate(start_ix);
}
}
sections.push(SlashCommandOutputSection {
range: 0..output.len(),
icon: IconName::Book,
label: "Project context".into(),
metadata: None,
}],
run_commands_in_text: false,
});
Ok(SlashCommandOutput {
text: output,
sections,
run_commands_in_text: true,
})
})
})
});
output.unwrap_or_else(|error| Task::ready(Err(error)))
.await
})
}
}
#[derive(JsonSchema, Deserialize)]
struct SearchQueries {
/// An array of semantic search queries.
///
/// These queries will be used to search the user's codebase.
/// The function can only accept 4 queries; otherwise it will error.
/// As such, it's important that you limit the length of the search_queries array to 4 queries or fewer.
search_queries: Vec<String>,
}
impl LanguageModelTool for SearchQueries {
fn name() -> String {
"search_queries".to_string()
}
fn description() -> String {
"Generate semantic search queries based on context".to_string()
}
}
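For reference, the model's search_queries tool call is expected to arrive as JSON that serde deserializes into SearchQueries. A minimal sketch of that round-trip, assuming serde_json is in scope (the query strings are hypothetical):

// Sketch only: demonstrates the payload shape SearchQueries accepts.
fn parse_tool_call_sketch() {
    let raw = r#"{ "search_queries": ["vector clock merge", "slash command registry"] }"#;
    let parsed: SearchQueries = serde_json::from_str(raw).unwrap();
    assert_eq!(parsed.search_queries.len(), 2);
}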

View File

@ -7,7 +7,7 @@ use anyhow::Result;
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use feature_flags::FeatureFlag;
use gpui::{AppContext, Task, WeakView};
use language::{CodeLabel, LineEnding, LspAdapterDelegate};
use language::{CodeLabel, LspAdapterDelegate};
use semantic_index::{LoadedSearchResult, SemanticDb};
use std::{
fmt::Write,
@ -101,7 +101,7 @@ impl SlashCommand for SearchSlashCommand {
cx.spawn(|cx| async move {
let results = project_index
.read_with(&cx, |project_index, cx| {
project_index.search(query.clone(), limit.unwrap_or(5), cx)
project_index.search(vec![query.clone()], limit.unwrap_or(5), cx)
})?
.await?;
@ -112,31 +112,8 @@ impl SlashCommand for SearchSlashCommand {
.spawn(async move {
let mut text = format!("Search results for {query}:\n");
let mut sections = Vec::new();
for LoadedSearchResult {
path,
range,
full_path,
file_content,
row_range,
} in loaded_results
{
let section_start_ix = text.len();
text.push_str(&codeblock_fence_for_path(
Some(&path),
Some(row_range.clone()),
));
let mut excerpt = file_content[range].to_string();
LineEnding::normalize(&mut excerpt);
text.push_str(&excerpt);
writeln!(text, "\n```\n").unwrap();
let section_end_ix = text.len() - 1;
sections.push(build_entry_output_section(
section_start_ix..section_end_ix,
Some(&full_path),
false,
Some(row_range.start() + 1..row_range.end() + 1),
));
for loaded_result in &loaded_results {
add_search_result_section(loaded_result, &mut text, &mut sections);
}
let query = SharedString::from(query);
@ -159,3 +136,35 @@ impl SlashCommand for SearchSlashCommand {
})
}
}
pub fn add_search_result_section(
loaded_result: &LoadedSearchResult,
text: &mut String,
sections: &mut Vec<SlashCommandOutputSection<usize>>,
) {
let LoadedSearchResult {
path,
full_path,
excerpt_content,
row_range,
..
} = loaded_result;
let section_start_ix = text.len();
text.push_str(&codeblock_fence_for_path(
Some(&path),
Some(row_range.clone()),
));
text.push_str(&excerpt_content);
if !text.ends_with('\n') {
text.push('\n');
}
writeln!(text, "```\n").unwrap();
let section_end_ix = text.len() - 1;
sections.push(build_entry_output_section(
section_start_ix..section_end_ix,
Some(&full_path),
false,
Some(row_range.start() + 1..row_range.end() + 1),
));
}

View File

@ -10,9 +10,9 @@ pub struct SlashCommandSettings {
/// Settings for the `/docs` slash command.
#[serde(default)]
pub docs: DocsCommandSettings,
/// Settings for the `/project` slash command.
/// Settings for the `/cargo-workspace` slash command.
#[serde(default)]
pub project: ProjectCommandSettings,
pub cargo_workspace: CargoWorkspaceCommandSettings,
}
/// Settings for the `/docs` slash command.
@ -23,10 +23,10 @@ pub struct DocsCommandSettings {
pub enabled: bool,
}
/// Settings for the `/project` slash command.
/// Settings for the `/cargo-workspace` slash command.
#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
pub struct ProjectCommandSettings {
/// Whether `/project` is enabled.
pub struct CargoWorkspaceCommandSettings {
/// Whether `/cargo-workspace` is enabled.
#[serde(default)]
pub enabled: bool,
}

View File

@ -284,7 +284,7 @@ impl TerminalInlineAssistant {
messages,
tools: Vec::new(),
stop: Vec::new(),
temperature: 1.0,
temperature: None,
})
}
@ -1066,6 +1066,7 @@ impl Codegen {
telemetry.report_assistant_event(
None,
telemetry_events::AssistantKind::Inline,
telemetry_events::AssistantPhase::Response,
model_telemetry_id,
response_latency,
error_message,

View File

@ -187,6 +187,7 @@ impl WorkflowSuggestion {
suggestion_range,
initial_prompt,
initial_transaction_id,
false,
Some(workspace.clone()),
Some(assistant_panel),
cx,

View File

@ -19,7 +19,6 @@ db.workspace = true
editor.workspace = true
gpui.workspace = true
http_client.workspace = true
isahc.workspace = true
log.workspace = true
markdown_preview.workspace = true
menu.workspace = true

View File

@ -9,7 +9,6 @@ use gpui::{
actions, AppContext, AsyncAppContext, Context as _, Global, Model, ModelContext,
SemanticVersion, SharedString, Task, View, ViewContext, VisualContext, WindowContext,
};
use isahc::AsyncBody;
use markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPreviewView};
use schemars::JsonSchema;
@ -20,7 +19,7 @@ use smol::{fs, io::AsyncReadExt};
use settings::{Settings, SettingsSources, SettingsStore};
use smol::{fs::File, process::Command};
use http_client::{HttpClient, HttpClientWithUrl};
use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
use std::{
env::{
@ -269,7 +268,7 @@ fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext<Wo
let client = client::Client::global(cx).http_client();
let url = client.build_url(&format!(
"/api/release_notes/{}/{}",
"/api/release_notes/v2/{}/{}",
release_channel.dev_name(),
version
));

View File

@ -66,7 +66,7 @@ impl ChannelBuffer {
let capability = channel_store.read(cx).channel_capability(channel.id);
language::Buffer::remote(buffer_id, response.replica_id as u16, capability, base_text)
})?;
buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))??;
buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
let subscription = client.subscribe_to_entity(channel.id.0)?;
@ -151,7 +151,7 @@ impl ChannelBuffer {
cx.notify();
this.buffer
.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))
})??;
})?;
Ok(())
}
@ -175,7 +175,10 @@ impl ChannelBuffer {
cx: &mut ModelContext<Self>,
) {
match event {
language::BufferEvent::Operation(operation) => {
language::BufferEvent::Operation {
operation,
is_local: true,
} => {
if *ZED_ALWAYS_ACTIVE {
if let language::Operation::UpdateSelections { selections, .. } = operation {
if selections.is_empty() {

View File

@ -1007,7 +1007,7 @@ impl ChannelStore {
.into_iter()
.map(language::proto::deserialize_operation)
.collect::<Result<Vec<_>>>()?;
buffer.apply_ops(incoming_operations, cx)?;
buffer.apply_ops(incoming_operations, cx);
anyhow::Ok(outgoing_operations)
})
.log_err();

View File

@ -18,7 +18,7 @@ test-support = ["clock/test-support", "collections/test-support", "gpui/test-sup
[dependencies]
anyhow.workspace = true
async-recursion = "0.3"
async-tungstenite = { workspace = true, features = ["async-std", "async-native-tls"] }
async-tungstenite = { workspace = true, features = ["async-std", "async-tls"] }
chrono = { workspace = true, features = ["serde"] }
clock.workspace = true
collections.workspace = true
@ -35,6 +35,8 @@ postage.workspace = true
rand.workspace = true
release_channel.workspace = true
rpc = { workspace = true, features = ["gpui"] }
rustls.workspace = true
rustls-native-certs.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true

View File

@ -240,8 +240,6 @@ pub enum EstablishConnectionError {
#[error("{0}")]
Other(#[from] anyhow::Error),
#[error("{0}")]
Http(#[from] http_client::Error),
#[error("{0}")]
InvalidHeaderValue(#[from] async_tungstenite::tungstenite::http::header::InvalidHeaderValue),
#[error("{0}")]
Io(#[from] std::io::Error),
@ -529,19 +527,13 @@ impl Client {
}
pub fn production(cx: &mut AppContext) -> Arc<Self> {
let user_agent = format!(
"Zed/{} ({}; {})",
AppVersion::global(cx),
std::env::consts::OS,
std::env::consts::ARCH
);
let clock = Arc::new(clock::RealSystemClock);
let http = Arc::new(HttpClientWithUrl::new(
let http = Arc::new(HttpClientWithUrl::new_uri(
cx.http_client(),
&ClientSettings::get_global(cx).server_url,
Some(user_agent),
ProxySettings::get_global(cx).proxy.clone(),
cx.http_client().proxy().cloned(),
));
Self::new(clock, http.clone(), cx)
Self::new(clock, http, cx)
}
pub fn id(&self) -> u64 {
@ -1145,8 +1137,32 @@ impl Client {
match url_scheme {
Https => {
let client_config = {
let mut root_store = rustls::RootCertStore::empty();
let root_certs = rustls_native_certs::load_native_certs();
for error in root_certs.errors {
log::warn!("error loading native certs: {:?}", error);
}
root_store.add_parsable_certificates(
&root_certs
.certs
.into_iter()
.map(|cert| cert.as_ref().to_owned())
.collect::<Vec<_>>(),
);
rustls::ClientConfig::builder()
.with_safe_defaults()
.with_root_certificates(root_store)
.with_no_client_auth()
};
let (stream, _) =
async_tungstenite::async_std::client_async_tls(request, stream).await?;
async_tungstenite::async_tls::client_async_tls_with_connector(
request,
stream,
Some(client_config.into()),
)
.await?;
Ok(Connection::new(
stream
.map_err(|error| anyhow!(error))
@ -1605,6 +1621,10 @@ impl ProtoClient for Client {
fn message_handler_set(&self) -> &parking_lot::Mutex<ProtoMessageHandlerSet> {
&self.handler_set
}
fn is_via_collab(&self) -> bool {
true
}
}
#[derive(Serialize, Deserialize)]

View File

@ -16,9 +16,9 @@ use std::io::Write;
use std::{env, mem, path::PathBuf, sync::Arc, time::Duration};
use sysinfo::{CpuRefreshKind, Pid, ProcessRefreshKind, RefreshKind, System};
use telemetry_events::{
ActionEvent, AppEvent, AssistantEvent, AssistantKind, CallEvent, CpuEvent, EditEvent,
EditorEvent, Event, EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent,
MemoryEvent, ReplEvent, SettingEvent,
ActionEvent, AppEvent, AssistantEvent, AssistantKind, AssistantPhase, CallEvent, CpuEvent,
EditEvent, EditorEvent, Event, EventRequestBody, EventWrapper, ExtensionEvent,
InlineCompletionEvent, MemoryEvent, ReplEvent, SettingEvent,
};
use tempfile::NamedTempFile;
#[cfg(not(debug_assertions))]
@ -37,9 +37,10 @@ pub struct Telemetry {
struct TelemetryState {
settings: TelemetrySettings,
metrics_id: Option<Arc<str>>, // Per logged-in user
system_id: Option<Arc<str>>, // Per system
installation_id: Option<Arc<str>>, // Per app installation (different for dev, nightly, preview, and stable)
session_id: Option<String>, // Per app launch
metrics_id: Option<Arc<str>>, // Per logged-in user
release_channel: Option<&'static str>,
architecture: &'static str,
events_queue: Vec<EventWrapper>,
@ -191,9 +192,10 @@ impl Telemetry {
settings: *TelemetrySettings::get_global(cx),
architecture: env::consts::ARCH,
release_channel,
system_id: None,
installation_id: None,
metrics_id: None,
session_id: None,
metrics_id: None,
events_queue: Vec::new(),
flush_events_task: None,
log_file: None,
@ -283,11 +285,13 @@ impl Telemetry {
pub fn start(
self: &Arc<Self>,
system_id: Option<String>,
installation_id: Option<String>,
session_id: String,
cx: &mut AppContext,
) {
let mut state = self.state.lock();
state.system_id = system_id.map(|id| id.into());
state.installation_id = installation_id.map(|id| id.into());
state.session_id = Some(session_id);
state.app_version = release_channel::AppVersion::global(cx).to_string();
@ -391,6 +395,7 @@ impl Telemetry {
self: &Arc<Self>,
conversation_id: Option<String>,
kind: AssistantKind,
phase: AssistantPhase,
model: String,
response_latency: Option<Duration>,
error_message: Option<String>,
@ -398,6 +403,7 @@ impl Telemetry {
let event = Event::Assistant(AssistantEvent {
conversation_id,
kind,
phase,
model: model.to_string(),
response_latency,
error_message,
@ -635,9 +641,10 @@ impl Telemetry {
let state = this.state.lock();
let request_body = EventRequestBody {
system_id: state.system_id.as_deref().map(Into::into),
installation_id: state.installation_id.as_deref().map(Into::into),
metrics_id: state.metrics_id.as_deref().map(Into::into),
session_id: state.session_id.clone(),
metrics_id: state.metrics_id.as_deref().map(Into::into),
is_staff: state.is_staff,
app_version: state.app_version.clone(),
os_name: state.os_name.clone(),
@ -709,6 +716,7 @@ mod tests {
Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(),
));
let http = FakeHttpClient::with_200_response();
let system_id = Some("system_id".to_string());
let installation_id = Some("installation_id".to_string());
let session_id = "session_id".to_string();
@ -716,7 +724,7 @@ mod tests {
let telemetry = Telemetry::new(clock.clone(), http, cx);
telemetry.state.lock().max_queue_size = 4;
telemetry.start(installation_id, session_id, cx);
telemetry.start(system_id, installation_id, session_id, cx);
assert!(is_empty_state(&telemetry));
@ -794,13 +802,14 @@ mod tests {
Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(),
));
let http = FakeHttpClient::with_200_response();
let system_id = Some("system_id".to_string());
let installation_id = Some("installation_id".to_string());
let session_id = "session_id".to_string();
cx.update(|cx| {
let telemetry = Telemetry::new(clock.clone(), http, cx);
telemetry.state.lock().max_queue_size = 4;
telemetry.start(installation_id, session_id, cx);
telemetry.start(system_id, installation_id, session_id, cx);
assert!(is_empty_state(&telemetry));

View File

@ -9,6 +9,8 @@ use std::{
pub use system_clock::*;
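/// Replica id reserved for local branches; `Global` tracks its value out-of-band
/// instead of growing the per-replica vector to u16::MAX entries.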
pub const LOCAL_BRANCH_REPLICA_ID: u16 = u16::MAX;
/// A unique identifier for each distributed node.
pub type ReplicaId = u16;
@ -25,7 +27,10 @@ pub struct Lamport {
/// A [vector clock](https://en.wikipedia.org/wiki/Vector_clock).
#[derive(Clone, Default, Hash, Eq, PartialEq)]
pub struct Global(SmallVec<[u32; 8]>);
pub struct Global {
values: SmallVec<[u32; 8]>,
local_branch_value: u32,
}
impl Global {
pub fn new() -> Self {
@ -33,41 +38,51 @@ impl Global {
}
pub fn get(&self, replica_id: ReplicaId) -> Seq {
self.0.get(replica_id as usize).copied().unwrap_or(0) as Seq
if replica_id == LOCAL_BRANCH_REPLICA_ID {
self.local_branch_value
} else {
self.values.get(replica_id as usize).copied().unwrap_or(0) as Seq
}
}
pub fn observe(&mut self, timestamp: Lamport) {
if timestamp.value > 0 {
let new_len = timestamp.replica_id as usize + 1;
if new_len > self.0.len() {
self.0.resize(new_len, 0);
}
if timestamp.replica_id == LOCAL_BRANCH_REPLICA_ID {
self.local_branch_value = cmp::max(self.local_branch_value, timestamp.value);
} else {
let new_len = timestamp.replica_id as usize + 1;
if new_len > self.values.len() {
self.values.resize(new_len, 0);
}
let entry = &mut self.0[timestamp.replica_id as usize];
*entry = cmp::max(*entry, timestamp.value);
let entry = &mut self.values[timestamp.replica_id as usize];
*entry = cmp::max(*entry, timestamp.value);
}
}
}
pub fn join(&mut self, other: &Self) {
if other.0.len() > self.0.len() {
self.0.resize(other.0.len(), 0);
if other.values.len() > self.values.len() {
self.values.resize(other.values.len(), 0);
}
for (left, right) in self.0.iter_mut().zip(&other.0) {
for (left, right) in self.values.iter_mut().zip(&other.values) {
*left = cmp::max(*left, *right);
}
self.local_branch_value = cmp::max(self.local_branch_value, other.local_branch_value);
}
pub fn meet(&mut self, other: &Self) {
if other.0.len() > self.0.len() {
self.0.resize(other.0.len(), 0);
if other.values.len() > self.values.len() {
self.values.resize(other.values.len(), 0);
}
let mut new_len = 0;
for (ix, (left, right)) in self
.0
.values
.iter_mut()
.zip(other.0.iter().chain(iter::repeat(&0)))
.zip(other.values.iter().chain(iter::repeat(&0)))
.enumerate()
{
if *left == 0 {
@ -80,7 +95,8 @@ impl Global {
new_len = ix + 1;
}
}
self.0.resize(new_len, 0);
self.values.resize(new_len, 0);
self.local_branch_value = cmp::min(self.local_branch_value, other.local_branch_value);
}
pub fn observed(&self, timestamp: Lamport) -> bool {
@ -88,34 +104,44 @@ impl Global {
}
pub fn observed_any(&self, other: &Self) -> bool {
self.0
self.values
.iter()
.zip(other.0.iter())
.zip(other.values.iter())
.any(|(left, right)| *right > 0 && left >= right)
|| (other.local_branch_value > 0 && self.local_branch_value >= other.local_branch_value)
}
pub fn observed_all(&self, other: &Self) -> bool {
let mut rhs = other.0.iter();
self.0.iter().all(|left| match rhs.next() {
let mut rhs = other.values.iter();
self.values.iter().all(|left| match rhs.next() {
Some(right) => left >= right,
None => true,
}) && rhs.next().is_none()
&& self.local_branch_value >= other.local_branch_value
}
pub fn changed_since(&self, other: &Self) -> bool {
self.0.len() > other.0.len()
self.values.len() > other.values.len()
|| self
.0
.values
.iter()
.zip(other.0.iter())
.zip(other.values.iter())
.any(|(left, right)| left > right)
|| self.local_branch_value > other.local_branch_value
}
pub fn iter(&self) -> impl Iterator<Item = Lamport> + '_ {
self.0.iter().enumerate().map(|(replica_id, seq)| Lamport {
replica_id: replica_id as ReplicaId,
value: *seq,
})
self.values
.iter()
.enumerate()
.map(|(replica_id, seq)| Lamport {
replica_id: replica_id as ReplicaId,
value: *seq,
})
.chain((self.local_branch_value > 0).then_some(Lamport {
replica_id: LOCAL_BRANCH_REPLICA_ID,
value: self.local_branch_value,
}))
}
}
@ -192,6 +218,9 @@ impl fmt::Debug for Global {
}
write!(f, "{}: {}", timestamp.replica_id, timestamp.value)?;
}
if self.local_branch_value > 0 {
write!(f, "<branch>: {}", self.local_branch_value)?;
}
write!(f, "}}")
}
}
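The Global changes above reserve u16::MAX as the local-branch replica id and keep its counter in a dedicated local_branch_value field rather than in the dense values vector. A reduced, self-contained sketch of that layout (field names mirror the diff; this is an illustration, not the crate itself):

// Sketch: a vector clock with one out-of-band slot for the local branch.
const LOCAL_BRANCH_REPLICA_ID: u16 = u16::MAX;

#[derive(Default)]
struct MiniGlobal {
    values: Vec<u32>,        // dense per-replica maxima, indexed by replica id
    local_branch_value: u32, // separate slot so `values` never grows to u16::MAX entries
}

impl MiniGlobal {
    fn observe(&mut self, replica_id: u16, value: u32) {
        if replica_id == LOCAL_BRANCH_REPLICA_ID {
            self.local_branch_value = self.local_branch_value.max(value);
        } else {
            let ix = replica_id as usize;
            if self.values.len() <= ix {
                self.values.resize(ix + 1, 0);
            }
            self.values[ix] = self.values[ix].max(value);
        }
    }
}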

View File

@ -37,6 +37,7 @@ futures.workspace = true
google_ai.workspace = true
hex.workspace = true
http_client.workspace = true
isahc_http_client.workspace = true
jsonwebtoken.workspace = true
live_kit_server.workspace = true
log.workspace = true

View File

@ -18,8 +18,8 @@ use sha2::{Digest, Sha256};
use std::sync::{Arc, OnceLock};
use telemetry_events::{
ActionEvent, AppEvent, AssistantEvent, CallEvent, CpuEvent, EditEvent, EditorEvent, Event,
EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, ReplEvent,
SettingEvent,
EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, Panic,
ReplEvent, SettingEvent,
};
use uuid::Uuid;
@ -149,7 +149,8 @@ pub async fn post_crash(
installation_id = %installation_id,
description = %description,
backtrace = %summary,
"crash report");
"crash report"
);
if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() {
let payload = slack::WebhookBody::new(|w| {
@ -295,10 +296,11 @@ pub async fn post_panic(
version = %panic.app_version,
os_name = %panic.os_name,
os_version = %panic.os_version.clone().unwrap_or_default(),
installation_id = %panic.installation_id.unwrap_or_default(),
installation_id = %panic.installation_id.clone().unwrap_or_default(),
description = %panic.payload,
backtrace = %panic.backtrace.join("\n"),
"panic report");
"panic report"
);
let backtrace = if panic.backtrace.len() > 25 {
let total = panic.backtrace.len();
@ -316,6 +318,11 @@ pub async fn post_panic(
} else {
panic.backtrace.join("\n")
};
if !report_to_slack(&panic) {
return Ok(());
}
let backtrace_with_summary = panic.payload + "\n" + &backtrace;
if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() {
@ -356,6 +363,23 @@ pub async fn post_panic(
Ok(())
}
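// Returns false for known, noisy Linux GPU-driver failures so they don't page Slack.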
fn report_to_slack(panic: &Panic) -> bool {
if panic.os_name == "Linux" {
if panic.payload.contains("ERROR_SURFACE_LOST_KHR") {
return false;
}
if panic
.payload
.contains("GPU has crashed, and no debug information is available")
{
return false;
}
}
true
}
pub async fn post_events(
Extension(app): Extension<Arc<AppState>>,
TypedHeader(ZedChecksumHeader(checksum)): TypedHeader<ZedChecksumHeader>,
@ -627,7 +651,9 @@ where
#[derive(Serialize, Debug, clickhouse::Row)]
pub struct EditorEventRow {
system_id: String,
installation_id: String,
session_id: Option<String>,
metrics_id: String,
operation: String,
app_version: String,
@ -647,7 +673,6 @@ pub struct EditorEventRow {
historical_event: bool,
architecture: String,
is_staff: Option<bool>,
session_id: Option<String>,
major: Option<i32>,
minor: Option<i32>,
patch: Option<i32>,
@ -677,9 +702,10 @@ impl EditorEventRow {
os_name: body.os_name.clone(),
os_version: body.os_version.clone().unwrap_or_default(),
architecture: body.architecture.clone(),
system_id: body.system_id.clone().unwrap_or_default(),
installation_id: body.installation_id.clone().unwrap_or_default(),
metrics_id: body.metrics_id.clone().unwrap_or_default(),
session_id: body.session_id.clone(),
metrics_id: body.metrics_id.clone().unwrap_or_default(),
is_staff: body.is_staff,
time: time.timestamp_millis(),
operation: event.operation,
@ -699,6 +725,7 @@ impl EditorEventRow {
#[derive(Serialize, Debug, clickhouse::Row)]
pub struct InlineCompletionEventRow {
installation_id: String,
session_id: Option<String>,
provider: String,
suggestion_accepted: bool,
app_version: String,
@ -713,7 +740,6 @@ pub struct InlineCompletionEventRow {
city: String,
time: i64,
is_staff: Option<bool>,
session_id: Option<String>,
major: Option<i32>,
minor: Option<i32>,
patch: Option<i32>,
@ -834,6 +860,7 @@ pub struct AssistantEventRow {
// AssistantEventRow
conversation_id: String,
kind: String,
phase: String,
model: String,
response_latency_in_ms: Option<i64>,
error_message: Option<String>,
@ -866,6 +893,7 @@ impl AssistantEventRow {
time: time.timestamp_millis(),
conversation_id: event.conversation_id.unwrap_or_default(),
kind: event.kind.to_string(),
phase: event.phase.to_string(),
model: event.model,
response_latency_in_ms: event
.response_latency
@ -878,6 +906,7 @@ impl AssistantEventRow {
#[derive(Debug, clickhouse::Row, Serialize)]
pub struct CpuEventRow {
installation_id: Option<String>,
session_id: Option<String>,
is_staff: Option<bool>,
usage_as_percentage: f32,
core_count: u32,
@ -886,7 +915,6 @@ pub struct CpuEventRow {
os_name: String,
os_version: String,
time: i64,
session_id: Option<String>,
// pub normalized_cpu_usage: f64, MATERIALIZED
major: Option<i32>,
minor: Option<i32>,

View File

@ -689,9 +689,7 @@ impl Database {
}
let mut text_buffer = text::Buffer::new(0, text::BufferId::new(1).unwrap(), base_text);
text_buffer
.apply_ops(operations.into_iter().filter_map(operation_from_wire))
.unwrap();
text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire));
let base_text = text_buffer.text();
let epoch = buffer.epoch + 1;

View File

@ -96,16 +96,14 @@ async fn test_channel_buffers(db: &Arc<Database>) {
text::BufferId::new(1).unwrap(),
buffer_response_b.base_text,
);
buffer_b
.apply_ops(buffer_response_b.operations.into_iter().map(|operation| {
let operation = proto::deserialize_operation(operation).unwrap();
if let language::Operation::Buffer(operation) = operation {
operation
} else {
unreachable!()
}
}))
.unwrap();
buffer_b.apply_ops(buffer_response_b.operations.into_iter().map(|operation| {
let operation = proto::deserialize_operation(operation).unwrap();
if let language::Operation::Buffer(operation) = operation {
operation
} else {
unreachable!()
}
}));
assert_eq!(buffer_b.text(), "hello, cruel world");

View File

@ -22,7 +22,7 @@ use chrono::{DateTime, Duration, Utc};
use collections::HashMap;
use db::{usage_measure::UsageMeasure, ActiveUserCount, LlmDatabase};
use futures::{Stream, StreamExt as _};
use http_client::IsahcHttpClient;
use isahc_http_client::IsahcHttpClient;
use rpc::ListModelsResponse;
use rpc::{
proto::Plan, LanguageModelProvider, PerformCompletionParams, EXPIRED_LLM_TOKEN_HEADER_NAME,
@ -72,6 +72,7 @@ impl LlmState {
let http_client = IsahcHttpClient::builder()
.default_header("User-Agent", user_agent)
.build()
.map(IsahcHttpClient::from)
.context("failed to construct http client")?;
let this = Self {

View File

@ -35,6 +35,8 @@ use chrono::Utc;
use collections::{HashMap, HashSet};
pub use connection_pool::{ConnectionPool, ZedVersion};
use core::fmt::{self, Debug, Formatter};
use http_client::HttpClient;
use isahc_http_client::IsahcHttpClient;
use open_ai::{OpenAiEmbeddingModel, OPEN_AI_API_URL};
use sha2::Digest;
use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi};
@ -45,7 +47,6 @@ use futures::{
stream::FuturesUnordered,
FutureExt, SinkExt, StreamExt, TryStreamExt,
};
use http_client::IsahcHttpClient;
use prometheus::{register_int_gauge, IntGauge};
use rpc::{
proto::{
@ -139,7 +140,7 @@ struct Session {
connection_pool: Arc<parking_lot::Mutex<ConnectionPool>>,
app_state: Arc<AppState>,
supermaven_client: Option<Arc<SupermavenAdminApi>>,
http_client: Arc<IsahcHttpClient>,
http_client: Arc<dyn HttpClient>,
/// The GeoIP country code for the user.
#[allow(unused)]
geoip_country_code: Option<String>,
@ -957,7 +958,7 @@ impl Server {
let user_agent = format!("Zed Server/{}", env!("CARGO_PKG_VERSION"));
let http_client = match IsahcHttpClient::builder().default_header("User-Agent", user_agent).build() {
Ok(http_client) => Arc::new(http_client),
Ok(http_client) => Arc::new(IsahcHttpClient::from(http_client)),
Err(error) => {
tracing::error!(?error, "failed to create HTTP client");
return;

View File

@ -1524,6 +1524,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
show_type_hints: true,
show_parameter_hints: false,
show_other_hints: true,
show_background: false,
})
});
});
@ -1538,6 +1539,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
show_type_hints: true,
show_parameter_hints: false,
show_other_hints: true,
show_background: false,
})
});
});
@ -1786,6 +1788,7 @@ async fn test_inlay_hint_refresh_is_forwarded(
show_type_hints: false,
show_parameter_hints: false,
show_other_hints: false,
show_background: false,
})
});
});
@ -1800,6 +1803,7 @@ async fn test_inlay_hint_refresh_is_forwarded(
show_type_hints: true,
show_parameter_hints: true,
show_other_hints: true,
show_background: false,
})
});
});

View File

@ -289,7 +289,7 @@ async fn test_basic_following(
.get_open_buffer(&(worktree_id, "2.txt").into(), cx)
.unwrap()
});
let mut result = MultiBuffer::new(0, Capability::ReadWrite);
let mut result = MultiBuffer::new(Capability::ReadWrite);
result.push_excerpts(
buffer_a1,
[ExcerptRange {

View File

@ -53,6 +53,7 @@ async fn test_sharing_an_ssh_remote_project(
let (project_a, worktree_id) = client_a
.build_ssh_project("/code/project1", client_ssh, cx_a)
.await;
executor.run_until_parked();
// User A shares the remote project.
let active_call_a = cx_a.read(ActiveCall::global);
@ -102,7 +103,7 @@ async fn test_sharing_an_ssh_remote_project(
all_language_settings(file, cx)
.language(Some(&("Rust".into())))
.language_servers,
["override-rust-analyzer".into()]
["override-rust-analyzer".to_string()]
)
});

View File

@ -239,7 +239,6 @@ pub struct Resource {
pub struct ResourceContent {
pub uri: Url,
pub mime_type: Option<String>,
pub content_type: String,
pub text: Option<String>,
pub data: Option<String>,
}

View File

@ -767,7 +767,7 @@ mod tests {
let buffer_1 = cx.new_model(|cx| Buffer::local("a = 1\nb = 2\n", cx));
let buffer_2 = cx.new_model(|cx| Buffer::local("c = 3\nd = 4\n", cx));
let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite);
let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite);
multibuffer.push_excerpts(
buffer_1.clone(),
[ExcerptRange {
@ -1018,7 +1018,7 @@ mod tests {
.unwrap();
let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite);
let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite);
multibuffer.push_excerpts(
private_buffer.clone(),
[ExcerptRange {

View File

@ -11,16 +11,14 @@ pub use smol;
pub use sqlez;
pub use sqlez_macros;
use release_channel::ReleaseChannel;
pub use release_channel::RELEASE_CHANNEL;
use sqlez::domain::Migrator;
use sqlez::thread_safe_connection::ThreadSafeConnection;
use sqlez_macros::sql;
use std::env;
use std::future::Future;
use std::path::Path;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::LazyLock;
use std::sync::{atomic::Ordering, LazyLock};
use std::{env, sync::atomic::AtomicBool};
use util::{maybe, ResultExt};
const CONNECTION_INITIALIZE_QUERY: &str = sql!(
@ -47,16 +45,12 @@ pub static ALL_FILE_DB_FAILED: LazyLock<AtomicBool> = LazyLock::new(|| AtomicBoo
/// This will retry a couple times if there are failures. If opening fails once, the db directory
/// is moved to a backup folder and a new one is created. If that fails, a shared in memory db is created.
/// In either case, static variables are set so that the user can be notified.
pub async fn open_db<M: Migrator + 'static>(
db_dir: &Path,
release_channel: &ReleaseChannel,
) -> ThreadSafeConnection<M> {
pub async fn open_db<M: Migrator + 'static>(db_dir: &Path, scope: &str) -> ThreadSafeConnection<M> {
if *ZED_STATELESS {
return open_fallback_db().await;
}
let release_channel_name = release_channel.dev_name();
let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name)));
let main_db_dir = db_dir.join(format!("0-{}", scope));
let connection = maybe!(async {
smol::fs::create_dir_all(&main_db_dir)
@ -118,7 +112,7 @@ pub async fn open_test_db<M: Migrator>(db_name: &str) -> ThreadSafeConnection<M>
/// Implements a basic DB wrapper for a given domain
#[macro_export]
macro_rules! define_connection {
(pub static ref $id:ident: $t:ident<()> = $migrations:expr;) => {
(pub static ref $id:ident: $t:ident<()> = $migrations:expr; $($global:ident)?) => {
pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<$t>);
impl ::std::ops::Deref for $t {
@ -139,18 +133,23 @@ macro_rules! define_connection {
}
}
use std::sync::LazyLock;
#[cfg(any(test, feature = "test-support"))]
pub static $id: LazyLock<$t> = LazyLock::new(|| {
pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| {
$t($crate::smol::block_on($crate::open_test_db(stringify!($id))))
});
#[cfg(not(any(test, feature = "test-support")))]
pub static $id: LazyLock<$t> = LazyLock::new(|| {
$t($crate::smol::block_on($crate::open_db($crate::database_dir(), &$crate::RELEASE_CHANNEL)))
pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| {
let db_dir = $crate::database_dir();
let scope = if false $(|| stringify!($global) == "global")? {
"global"
} else {
$crate::RELEASE_CHANNEL.dev_name()
};
$t($crate::smol::block_on($crate::open_db(db_dir, scope)))
});
};
(pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr;) => {
(pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr; $($global:ident)?) => {
pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<( $($d),+, $t )>);
impl ::std::ops::Deref for $t {
@ -178,7 +177,13 @@ macro_rules! define_connection {
#[cfg(not(any(test, feature = "test-support")))]
pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| {
$t($crate::smol::block_on($crate::open_db($crate::database_dir(), &$crate::RELEASE_CHANNEL)))
let db_dir = $crate::database_dir();
let scope = if false $(|| stringify!($global) == "global")? {
"global"
} else {
$crate::RELEASE_CHANNEL.dev_name()
};
$t($crate::smol::block_on($crate::open_db(db_dir, scope)))
});
};
}
@ -225,7 +230,11 @@ mod tests {
.prefix("DbTests")
.tempdir()
.unwrap();
let _bad_db = open_db::<BadDB>(tempdir.path(), &release_channel::ReleaseChannel::Dev).await;
let _bad_db = open_db::<BadDB>(
tempdir.path(),
&release_channel::ReleaseChannel::Dev.dev_name(),
)
.await;
}
/// Test that DB exists but corrupted (causing recreate)
@ -262,13 +271,19 @@ mod tests {
.tempdir()
.unwrap();
{
let corrupt_db =
open_db::<CorruptedDB>(tempdir.path(), &release_channel::ReleaseChannel::Dev).await;
let corrupt_db = open_db::<CorruptedDB>(
tempdir.path(),
&release_channel::ReleaseChannel::Dev.dev_name(),
)
.await;
assert!(corrupt_db.persistent());
}
let good_db =
open_db::<GoodDB>(tempdir.path(), &release_channel::ReleaseChannel::Dev).await;
let good_db = open_db::<GoodDB>(
tempdir.path(),
&release_channel::ReleaseChannel::Dev.dev_name(),
)
.await;
assert!(
good_db.select_row::<usize>("SELECT * FROM test2").unwrap()()
.unwrap()
@ -311,8 +326,11 @@ mod tests {
.unwrap();
{
// Setup the bad database
let corrupt_db =
open_db::<CorruptedDB>(tempdir.path(), &release_channel::ReleaseChannel::Dev).await;
let corrupt_db = open_db::<CorruptedDB>(
tempdir.path(),
&release_channel::ReleaseChannel::Dev.dev_name(),
)
.await;
assert!(corrupt_db.persistent());
}
@ -323,7 +341,7 @@ mod tests {
let guard = thread::spawn(move || {
let good_db = smol::block_on(open_db::<GoodDB>(
tmp_path.as_path(),
&release_channel::ReleaseChannel::Dev,
&release_channel::ReleaseChannel::Dev.dev_name(),
));
assert!(
good_db.select_row::<usize>("SELECT * FROM test2").unwrap()()

View File

@ -60,3 +60,33 @@ mod tests {
assert_eq!(db.read_kvp("key-1").unwrap(), None);
}
}
define_connection!(pub static ref GLOBAL_KEY_VALUE_STORE: GlobalKeyValueStore<()> =
&[sql!(
CREATE TABLE IF NOT EXISTS kv_store(
key TEXT PRIMARY KEY,
value TEXT NOT NULL
) STRICT;
)];
global
);
impl GlobalKeyValueStore {
query! {
pub fn read_kvp(key: &str) -> Result<Option<String>> {
SELECT value FROM kv_store WHERE key = (?)
}
}
query! {
pub async fn write_kvp(key: String, value: String) -> Result<()> {
INSERT OR REPLACE INTO kv_store(key, value) VALUES ((?), (?))
}
}
query! {
pub async fn delete_kvp(key: String) -> Result<()> {
DELETE FROM kv_store WHERE key = (?)
}
}
}
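Assuming the query! macro expands for GlobalKeyValueStore the same way it does for the existing KEY_VALUE_STORE, usage would look roughly like this (a sketch; the key and value are hypothetical):

// Sketch: values written here survive across release channels, because the
// store above is defined with the `global` scope.
async fn kvp_sketch() -> anyhow::Result<()> {
    GLOBAL_KEY_VALUE_STORE
        .write_kvp("device-id".to_string(), "abc123".to_string())
        .await?;
    let value: Option<String> = GLOBAL_KEY_VALUE_STORE.read_kvp("device-id")?;
    assert_eq!(value.as_deref(), Some("abc123"));
    Ok(())
}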

View File

@ -156,12 +156,7 @@ impl ProjectDiagnosticsEditor {
cx.on_focus_out(&focus_handle, |this, _event, cx| this.focus_out(cx))
.detach();
let excerpts = cx.new_model(|cx| {
MultiBuffer::new(
project_handle.read(cx).replica_id(),
project_handle.read(cx).capability(),
)
});
let excerpts = cx.new_model(|cx| MultiBuffer::new(project_handle.read(cx).capability()));
let editor = cx.new_view(|cx| {
let mut editor =
Editor::for_multibuffer(excerpts.clone(), Some(project_handle.clone()), false, cx);

View File

@ -273,6 +273,7 @@ gpui::actions!(
NextScreen,
OpenExcerpts,
OpenExcerptsSplit,
OpenProposedChangesEditor,
OpenFile,
OpenPermalinkToLine,
OpenUrl,

View File

@ -1671,7 +1671,7 @@ mod tests {
let mut excerpt_ids = Vec::new();
let multi_buffer = cx.new_model(|cx| {
let mut multi_buffer = MultiBuffer::new(0, Capability::ReadWrite);
let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite);
excerpt_ids.extend(multi_buffer.push_excerpts(
buffer1.clone(),
[ExcerptRange {

View File

@ -35,6 +35,7 @@ mod lsp_ext;
mod mouse_context_menu;
pub mod movement;
mod persistence;
mod proposed_changes_editor;
mod rust_analyzer_ext;
pub mod scroll;
mod selections_collection;
@ -46,7 +47,7 @@ mod signature_help;
#[cfg(any(test, feature = "test-support"))]
pub mod test;
use ::git::diff::{DiffHunk, DiffHunkStatus};
use ::git::diff::DiffHunkStatus;
use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegistry};
pub(crate) use actions::*;
use aho_corasick::AhoCorasick;
@ -67,7 +68,7 @@ use element::LineWithInvisibles;
pub use element::{
CursorLayout, EditorElement, HighlightedRange, HighlightedRangeLine, PointForPosition,
};
use futures::FutureExt;
use futures::{future, FutureExt};
use fuzzy::{StringMatch, StringMatchCandidate};
use git::blame::GitBlame;
use git::diff_hunk_to_display;
@ -98,6 +99,7 @@ use language::{
};
use language::{point_to_lsp, BufferRow, CharClassifier, Runnable, RunnableRange};
use linked_editing_ranges::refresh_linked_ranges;
use proposed_changes_editor::{ProposedChangesBuffer, ProposedChangesEditor};
use similar::{ChangeTag, TextDiff};
use task::{ResolvedTask, TaskTemplate, TaskVariables};
@ -113,7 +115,9 @@ pub use multi_buffer::{
Anchor, AnchorRangeExt, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, ToOffset,
ToPoint,
};
use multi_buffer::{ExpandExcerptDirection, MultiBufferPoint, MultiBufferRow, ToOffsetUtf16};
use multi_buffer::{
ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow, ToOffsetUtf16,
};
use ordered_float::OrderedFloat;
use parking_lot::{Mutex, RwLock};
use project::project_settings::{GitGutterSetting, ProjectSettings};
@ -412,6 +416,19 @@ impl Default for EditorStyle {
}
}
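// Computes the shared highlight style for inlay hints, honoring the new
// `show_background` setting added in this commit.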
pub fn make_inlay_hints_style(cx: &WindowContext) -> HighlightStyle {
let show_background = all_language_settings(None, cx)
.language(None)
.inlay_hints
.show_background;
HighlightStyle {
color: Some(cx.theme().status().hint),
background_color: show_background.then(|| cx.theme().status().hint_background),
..HighlightStyle::default()
}
}
type CompletionId = usize;
#[derive(Clone, Debug)]
@ -552,8 +569,8 @@ pub struct Editor {
find_all_references_task_sources: Vec<Anchor>,
next_completion_id: CompletionId,
completion_documentation_pre_resolve_debounce: DebouncedDelay,
available_code_actions: Option<(Location, Arc<[CodeAction]>)>,
code_actions_task: Option<Task<()>>,
available_code_actions: Option<(Location, Arc<[AvailableCodeAction]>)>,
code_actions_task: Option<Task<Result<()>>>,
document_highlights_task: Option<Task<()>>,
linked_editing_range_task: Option<Task<Option<()>>>,
linked_edit_ranges: linked_editing_ranges::LinkedEditingRanges,
@ -573,6 +590,7 @@ pub struct Editor {
gutter_hovered: bool,
hovered_link_state: Option<HoveredLinkState>,
inline_completion_provider: Option<RegisteredInlineCompletionProvider>,
code_action_providers: Vec<Arc<dyn CodeActionProvider>>,
active_inline_completion: Option<CompletionState>,
// enable_inline_completions is a switch that Vim can use to disable
// inline completions based on its mode.
@ -1343,10 +1361,16 @@ impl CompletionsMenu {
}
}
struct AvailableCodeAction {
excerpt_id: ExcerptId,
action: CodeAction,
provider: Arc<dyn CodeActionProvider>,
}
#[derive(Clone)]
struct CodeActionContents {
tasks: Option<Arc<ResolvedTasks>>,
actions: Option<Arc<[CodeAction]>>,
actions: Option<Arc<[AvailableCodeAction]>>,
}
impl CodeActionContents {
@ -1378,9 +1402,11 @@ impl CodeActionContents {
.map(|(kind, task)| CodeActionsItem::Task(kind.clone(), task.clone()))
})
.chain(self.actions.iter().flat_map(|actions| {
actions
.iter()
.map(|action| CodeActionsItem::CodeAction(action.clone()))
actions.iter().map(|available| CodeActionsItem::CodeAction {
excerpt_id: available.excerpt_id,
action: available.action.clone(),
provider: available.provider.clone(),
})
}))
}
fn get(&self, index: usize) -> Option<CodeActionsItem> {
@ -1393,10 +1419,13 @@ impl CodeActionContents {
.cloned()
.map(|(kind, task)| CodeActionsItem::Task(kind, task))
} else {
actions
.get(index - tasks.templates.len())
.cloned()
.map(CodeActionsItem::CodeAction)
actions.get(index - tasks.templates.len()).map(|available| {
CodeActionsItem::CodeAction {
excerpt_id: available.excerpt_id,
action: available.action.clone(),
provider: available.provider.clone(),
}
})
}
}
(Some(tasks), None) => tasks
@ -1404,7 +1433,15 @@ impl CodeActionContents {
.get(index)
.cloned()
.map(|(kind, task)| CodeActionsItem::Task(kind, task)),
(None, Some(actions)) => actions.get(index).cloned().map(CodeActionsItem::CodeAction),
(None, Some(actions)) => {
actions
.get(index)
.map(|available| CodeActionsItem::CodeAction {
excerpt_id: available.excerpt_id,
action: available.action.clone(),
provider: available.provider.clone(),
})
}
(None, None) => None,
}
}
@ -1414,7 +1451,11 @@ impl CodeActionContents {
#[derive(Clone)]
enum CodeActionsItem {
Task(TaskSourceKind, ResolvedTask),
CodeAction(CodeAction),
CodeAction {
excerpt_id: ExcerptId,
action: CodeAction,
provider: Arc<dyn CodeActionProvider>,
},
}
impl CodeActionsItem {
@ -1425,14 +1466,14 @@ impl CodeActionsItem {
Some(task)
}
fn as_code_action(&self) -> Option<&CodeAction> {
let Self::CodeAction(action) = self else {
let Self::CodeAction { action, .. } = self else {
return None;
};
Some(action)
}
fn label(&self) -> String {
match self {
Self::CodeAction(action) => action.lsp_action.title.clone(),
Self::CodeAction { action, .. } => action.lsp_action.title.clone(),
Self::Task(_, task) => task.resolved_label.clone(),
}
}
@ -1571,7 +1612,9 @@ impl CodeActionsMenu {
.enumerate()
.max_by_key(|(_, action)| match action {
CodeActionsItem::Task(_, task) => task.resolved_label.chars().count(),
CodeActionsItem::CodeAction(action) => action.lsp_action.title.chars().count(),
CodeActionsItem::CodeAction { action, .. } => {
action.lsp_action.title.chars().count()
}
})
.map(|(ix, _)| ix),
)
@ -1847,6 +1890,11 @@ impl Editor {
None
};
let mut code_action_providers = Vec::new();
if let Some(project) = project.clone() {
code_action_providers.push(Arc::new(project) as Arc<_>);
}
let mut this = Self {
focus_handle,
show_cursor_when_unfocused: false,
@ -1898,6 +1946,7 @@ impl Editor {
next_completion_id: 0,
completion_documentation_pre_resolve_debounce: DebouncedDelay::new(),
next_inlay_id: 0,
code_action_providers,
available_code_actions: Default::default(),
code_actions_task: Default::default(),
document_highlights_task: Default::default(),
@ -2142,10 +2191,6 @@ impl Editor {
});
}
pub fn replica_id(&self, cx: &AppContext) -> ReplicaId {
self.buffer.read(cx).replica_id()
}
pub fn leader_peer_id(&self) -> Option<PeerId> {
self.leader_peer_id
}
@ -4550,7 +4595,7 @@ impl Editor {
let action = action.clone();
cx.spawn(|editor, mut cx| async move {
while let Some(prev_task) = task {
prev_task.await;
prev_task.await.log_err();
task = editor.update(&mut cx, |this, _| this.code_actions_task.take())?;
}
@ -4724,17 +4769,16 @@ impl Editor {
Some(Task::ready(Ok(())))
})
}
CodeActionsItem::CodeAction(action) => {
let apply_code_actions = workspace
.read(cx)
.project()
.clone()
.update(cx, |project, cx| {
project.apply_code_action(buffer, action, true, cx)
});
CodeActionsItem::CodeAction {
excerpt_id,
action,
provider,
} => {
let apply_code_action =
provider.apply_code_action(buffer, action, excerpt_id, true, cx);
let workspace = workspace.downgrade();
Some(cx.spawn(|editor, cx| async move {
let project_transaction = apply_code_actions.await?;
let project_transaction = apply_code_action.await?;
Self::open_project_transaction(
&editor,
workspace,
@ -4755,8 +4799,6 @@ impl Editor {
title: String,
mut cx: AsyncWindowContext,
) -> Result<()> {
let replica_id = this.update(&mut cx, |this, cx| this.replica_id(cx))?;
let mut entries = transaction.0.into_iter().collect::<Vec<_>>();
cx.update(|cx| {
entries.sort_unstable_by_key(|(buffer, _)| {
@ -4799,8 +4841,7 @@ impl Editor {
let mut ranges_to_highlight = Vec::new();
let excerpt_buffer = cx.new_model(|cx| {
let mut multibuffer =
MultiBuffer::new(replica_id, Capability::ReadWrite).with_title(title);
let mut multibuffer = MultiBuffer::new(Capability::ReadWrite).with_title(title);
for (buffer_handle, transaction) in &entries {
let buffer = buffer_handle.read(cx);
ranges_to_highlight.extend(
@ -4835,8 +4876,16 @@ impl Editor {
Ok(())
}
pub fn push_code_action_provider(
&mut self,
provider: Arc<dyn CodeActionProvider>,
cx: &mut ViewContext<Self>,
) {
self.code_action_providers.push(provider);
self.refresh_code_actions(cx);
}
fn refresh_code_actions(&mut self, cx: &mut ViewContext<Self>) -> Option<()> {
let project = self.project.clone()?;
let buffer = self.buffer.read(cx);
let newest_selection = self.selections.newest_anchor().clone();
let (start_buffer, start) = buffer.text_anchor_for_position(newest_selection.start, cx)?;
@ -4850,13 +4899,30 @@ impl Editor {
.timer(CODE_ACTIONS_DEBOUNCE_TIMEOUT)
.await;
let actions = if let Ok(code_actions) = project.update(&mut cx, |project, cx| {
project.code_actions(&start_buffer, start..end, cx)
}) {
code_actions.await
} else {
Vec::new()
};
let (providers, tasks) = this.update(&mut cx, |this, cx| {
let providers = this.code_action_providers.clone();
let tasks = this
.code_action_providers
.iter()
.map(|provider| provider.code_actions(&start_buffer, start..end, cx))
.collect::<Vec<_>>();
(providers, tasks)
})?;
let mut actions = Vec::new();
for (provider, provider_actions) in
providers.into_iter().zip(future::join_all(tasks).await)
{
if let Some(provider_actions) = provider_actions.log_err() {
actions.extend(provider_actions.into_iter().map(|action| {
AvailableCodeAction {
excerpt_id: newest_selection.start.excerpt_id,
action,
provider: provider.clone(),
}
}));
}
}
this.update(&mut cx, |this, cx| {
this.available_code_actions = if actions.is_empty() {
@ -4872,7 +4938,6 @@ impl Editor {
};
cx.notify();
})
.log_err();
}));
None
}
@ -6162,7 +6227,7 @@ impl Editor {
pub fn prepare_revert_change(
revert_changes: &mut HashMap<BufferId, Vec<(Range<text::Anchor>, Rope)>>,
multi_buffer: &Model<MultiBuffer>,
hunk: &DiffHunk<MultiBufferRow>,
hunk: &MultiBufferDiffHunk,
cx: &AppContext,
) -> Option<()> {
let buffer = multi_buffer.read(cx).buffer(hunk.buffer_id)?;
@ -6715,6 +6780,10 @@ impl Editor {
}
pub fn rewrap(&mut self, _: &Rewrap, cx: &mut ViewContext<Self>) {
self.rewrap_impl(true, cx)
}
pub fn rewrap_impl(&mut self, only_text: bool, cx: &mut ViewContext<Self>) {
let buffer = self.buffer.read(cx).snapshot(cx);
let selections = self.selections.all::<Point>(cx);
let mut selections = selections.iter().peekable();
@ -6735,7 +6804,7 @@ impl Editor {
continue;
}
let mut should_rewrap = false;
let mut should_rewrap = !only_text;
if let Some(language_scope) = buffer.language_scope_at(selection.head()) {
match language_scope.language_name().0.as_ref() {
@ -6746,9 +6815,31 @@ impl Editor {
}
}
let row = selection.head().row;
let indent_size = buffer.indent_size_for_line(MultiBufferRow(row));
let indent_end = Point::new(row, indent_size.len);
// Since not all lines in the selection may be at the same indent
// level, choose the indent size that is the most common among all
// of the lines.
//
// If there is a tie, we use the deepest indent.
let (indent_size, indent_end) = {
let mut indent_size_occurrences = HashMap::default();
let mut rows_by_indent_size = HashMap::<IndentSize, Vec<u32>>::default();
for row in start_row..=end_row {
let indent = buffer.indent_size_for_line(MultiBufferRow(row));
rows_by_indent_size.entry(indent).or_default().push(row);
*indent_size_occurrences.entry(indent).or_insert(0) += 1;
}
let indent_size = indent_size_occurrences
.into_iter()
.max_by_key(|(indent, count)| (*count, indent.len))
.map(|(indent, _)| indent)
.unwrap_or_default();
let row = rows_by_indent_size[&indent_size][0];
let indent_end = Point::new(row, indent_size.len);
(indent_size, indent_end)
};
let mut line_prefix = indent_size.chars().collect::<String>();
@ -6798,10 +6889,22 @@ impl Editor {
let start = Point::new(start_row, 0);
let end = Point::new(end_row, buffer.line_len(MultiBufferRow(end_row)));
let selection_text = buffer.text_for_range(start..end).collect::<String>();
let unwrapped_text = selection_text
let Some(lines_without_prefixes) = selection_text
.lines()
.map(|line| line.strip_prefix(&line_prefix).unwrap())
.join(" ");
.map(|line| {
line.strip_prefix(&line_prefix)
.or_else(|| line.trim_start().strip_prefix(&line_prefix.trim_start()))
.ok_or_else(|| {
anyhow!("line did not start with prefix {line_prefix:?}: {line:?}")
})
})
.collect::<Result<Vec<_>, _>>()
.log_err()
else {
continue;
};
let unwrapped_text = lines_without_prefixes.join(" ");
let wrap_column = buffer
.settings_at(Point::new(start_row, 0), cx)
.preferred_line_length as usize;
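The indent-selection step above counts how often each indent size occurs across the selected rows and picks the most frequent one, breaking ties toward the deeper indent via the (count, len) sort key. A reduced sketch of just that selection, using plain usize widths in place of IndentSize:

use std::collections::HashMap;

// Sketch: most common indent wins; on a tie, the deeper indent wins.
fn dominant_indent(indents_per_row: &[usize]) -> usize {
    let mut occurrences: HashMap<usize, u32> = HashMap::new();
    for &indent in indents_per_row {
        *occurrences.entry(indent).or_insert(0) += 1;
    }
    occurrences
        .into_iter()
        .max_by_key(|&(indent, count)| (count, indent))
        .map(|(indent, _)| indent)
        .unwrap_or(0)
}

// dominant_indent(&[4, 4, 8]) == 4; dominant_indent(&[4, 8]) == 8.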
@ -9310,7 +9413,7 @@ impl Editor {
snapshot: &DisplaySnapshot,
initial_point: Point,
is_wrapped: bool,
hunks: impl Iterator<Item = DiffHunk<MultiBufferRow>>,
hunks: impl Iterator<Item = MultiBufferDiffHunk>,
cx: &mut ViewContext<Editor>,
) -> bool {
let display_point = initial_point.to_display_point(snapshot);
@ -9613,7 +9716,6 @@ impl Editor {
})
})
} else if !definitions.is_empty() {
let replica_id = self.replica_id(cx);
cx.spawn(|editor, mut cx| async move {
let (title, location_tasks, workspace) = editor
.update(&mut cx, |editor, cx| {
@ -9654,7 +9756,7 @@ impl Editor {
})
.context("location tasks preparation")?;
let locations = futures::future::join_all(location_tasks)
let locations = future::join_all(location_tasks)
.await
.into_iter()
.filter_map(|location| location.transpose())
@ -9666,9 +9768,7 @@ impl Editor {
};
let opened = workspace
.update(&mut cx, |workspace, cx| {
Self::open_locations_in_multibuffer(
workspace, locations, replica_id, title, split, cx,
)
Self::open_locations_in_multibuffer(workspace, locations, title, split, cx)
})
.ok();
@ -9765,7 +9865,6 @@ impl Editor {
}
let (buffer, head) = multi_buffer.text_anchor_for_position(head, cx)?;
let replica_id = self.replica_id(cx);
let workspace = self.workspace()?;
let project = workspace.read(cx).project().clone();
let references = project.update(cx, |project, cx| project.references(&buffer, head, cx));
@ -9806,9 +9905,7 @@ impl Editor {
)
})
.unwrap();
Self::open_locations_in_multibuffer(
workspace, locations, replica_id, title, false, cx,
);
Self::open_locations_in_multibuffer(workspace, locations, title, false, cx);
Navigated::Yes
})
}))
@ -9818,7 +9915,6 @@ impl Editor {
pub fn open_locations_in_multibuffer(
workspace: &mut Workspace,
mut locations: Vec<Location>,
replica_id: ReplicaId,
title: String,
split: bool,
cx: &mut ViewContext<Workspace>,
@ -9830,7 +9926,7 @@ impl Editor {
let capability = workspace.project().read(cx).capability();
let excerpt_buffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(replica_id, capability);
let mut multibuffer = MultiBuffer::new(capability);
while let Some(location) = locations.next() {
let buffer = location.buffer.read(cx);
let mut ranges_for_buffer = Vec::new();
@ -10050,9 +10146,8 @@ impl Editor {
syntax: cx.editor_style.syntax.clone(),
status: cx.editor_style.status.clone(),
inlay_hints_style: HighlightStyle {
color: Some(cx.theme().status().hint),
font_weight: Some(FontWeight::BOLD),
..HighlightStyle::default()
..make_inlay_hints_style(cx)
},
suggestions_style: HighlightStyle {
color: Some(cx.theme().status().predictive),
@ -11865,6 +11960,52 @@ impl Editor {
self.searchable
}
fn open_proposed_changes_editor(
&mut self,
_: &OpenProposedChangesEditor,
cx: &mut ViewContext<Self>,
) {
let Some(workspace) = self.workspace() else {
cx.propagate();
return;
};
let buffer = self.buffer.read(cx);
let mut new_selections_by_buffer = HashMap::default();
for selection in self.selections.all::<usize>(cx) {
for (buffer, mut range, _) in
buffer.range_to_buffer_ranges(selection.start..selection.end, cx)
{
if selection.reversed {
mem::swap(&mut range.start, &mut range.end);
}
let mut range = range.to_point(buffer.read(cx));
range.start.column = 0;
range.end.column = buffer.read(cx).line_len(range.end.row);
new_selections_by_buffer
.entry(buffer)
.or_insert(Vec::new())
.push(range)
}
}
let proposed_changes_buffers = new_selections_by_buffer
.into_iter()
.map(|(buffer, ranges)| ProposedChangesBuffer { buffer, ranges })
.collect::<Vec<_>>();
let proposed_changes_editor = cx.new_view(|cx| {
ProposedChangesEditor::new(proposed_changes_buffers, self.project.clone(), cx)
});
cx.window_context().defer(move |cx| {
workspace.update(cx, |workspace, cx| {
workspace.active_pane().update(cx, |pane, cx| {
pane.add_item(Box::new(proposed_changes_editor), true, true, None, cx);
});
});
});
}
fn open_excerpts_in_split(&mut self, _: &OpenExcerptsSplit, cx: &mut ViewContext<Self>) {
self.open_excerpts_common(true, cx)
}
@ -12379,7 +12520,7 @@ impl Editor {
fn hunks_for_selections(
multi_buffer_snapshot: &MultiBufferSnapshot,
selections: &[Selection<Anchor>],
) -> Vec<DiffHunk<MultiBufferRow>> {
) -> Vec<MultiBufferDiffHunk> {
let buffer_rows_for_selections = selections.iter().map(|selection| {
let head = selection.head();
let tail = selection.tail();
@ -12398,7 +12539,7 @@ fn hunks_for_selections(
pub fn hunks_for_rows(
rows: impl Iterator<Item = Range<MultiBufferRow>>,
multi_buffer_snapshot: &MultiBufferSnapshot,
) -> Vec<DiffHunk<MultiBufferRow>> {
) -> Vec<MultiBufferDiffHunk> {
let mut hunks = Vec::new();
let mut processed_buffer_rows: HashMap<BufferId, HashSet<Range<text::Anchor>>> =
HashMap::default();
@ -12410,14 +12551,14 @@ pub fn hunks_for_rows(
// when the caret is just above or just below the deleted hunk.
let allow_adjacent = hunk_status(&hunk) == DiffHunkStatus::Removed;
let related_to_selection = if allow_adjacent {
hunk.associated_range.overlaps(&query_rows)
|| hunk.associated_range.start == query_rows.end
|| hunk.associated_range.end == query_rows.start
hunk.row_range.overlaps(&query_rows)
|| hunk.row_range.start == query_rows.end
|| hunk.row_range.end == query_rows.start
} else {
// `selected_multi_buffer_rows` are inclusive (e.g. [2..2] means 2nd row is selected)
// `hunk.associated_range` is exclusive (e.g. [2..3] means 2nd row is selected)
hunk.associated_range.overlaps(&selected_multi_buffer_rows)
|| selected_multi_buffer_rows.end == hunk.associated_range.start
// `hunk.row_range` is exclusive (e.g. [2..3] means the hunk covers the 2nd row)
hunk.row_range.overlaps(&selected_multi_buffer_rows)
|| selected_multi_buffer_rows.end == hunk.row_range.start
};
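// Illustrative sketch (not part of this commit): with an inclusive selection
// of just the 2nd row (selected_multi_buffer_rows == 2..2) and an exclusive
// hunk range covering that same row (hunk.row_range == 2..3), the two ranges
// overlap; a hunk starting on the very next row (row_range == 3..4) still
// counts as related because selected_multi_buffer_rows.end == row_range.start.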
if related_to_selection {
if !processed_buffer_rows
@ -12504,6 +12645,48 @@ pub trait CompletionProvider {
}
}
pub trait CodeActionProvider {
fn code_actions(
&self,
buffer: &Model<Buffer>,
range: Range<text::Anchor>,
cx: &mut WindowContext,
) -> Task<Result<Vec<CodeAction>>>;
fn apply_code_action(
&self,
buffer_handle: Model<Buffer>,
action: CodeAction,
excerpt_id: ExcerptId,
push_to_history: bool,
cx: &mut WindowContext,
) -> Task<Result<ProjectTransaction>>;
}
impl CodeActionProvider for Model<Project> {
fn code_actions(
&self,
buffer: &Model<Buffer>,
range: Range<text::Anchor>,
cx: &mut WindowContext,
) -> Task<Result<Vec<CodeAction>>> {
self.update(cx, |project, cx| project.code_actions(buffer, range, cx))
}
fn apply_code_action(
&self,
buffer_handle: Model<Buffer>,
action: CodeAction,
_excerpt_id: ExcerptId,
push_to_history: bool,
cx: &mut WindowContext,
) -> Task<Result<ProjectTransaction>> {
self.update(cx, |project, cx| {
project.apply_code_action(buffer_handle, action, push_to_history, cx)
})
}
}
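// A hedged sketch (not part of this commit) of an alternative implementor of
// the trait above, returning no actions; it assumes `Task::ready` and a
// `Default` impl for `ProjectTransaction`:
struct NoopCodeActionProvider;
impl CodeActionProvider for NoopCodeActionProvider {
    fn code_actions(
        &self,
        _buffer: &Model<Buffer>,
        _range: Range<text::Anchor>,
        _cx: &mut WindowContext,
    ) -> Task<Result<Vec<CodeAction>>> {
        // No actions to offer.
        Task::ready(Ok(Vec::new()))
    }
    fn apply_code_action(
        &self,
        _buffer_handle: Model<Buffer>,
        _action: CodeAction,
        _excerpt_id: ExcerptId,
        _push_to_history: bool,
        _cx: &mut WindowContext,
    ) -> Task<Result<ProjectTransaction>> {
        // Nothing to apply; report an empty transaction.
        Task::ready(Ok(ProjectTransaction::default()))
    }
}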
fn snippet_completions(
project: &Project,
buffer: &Model<Buffer>,
@ -13008,10 +13191,7 @@ impl Render for Editor {
scrollbar_width: EditorElement::SCROLLBAR_WIDTH,
syntax: cx.theme().syntax().clone(),
status: cx.theme().status().clone(),
inlay_hints_style: HighlightStyle {
color: Some(cx.theme().status().hint),
..HighlightStyle::default()
},
inlay_hints_style: make_inlay_hints_style(cx),
suggestions_style: HighlightStyle {
color: Some(cx.theme().status().predictive),
..HighlightStyle::default()
@ -13721,10 +13901,10 @@ impl RowRangeExt for Range<DisplayRow> {
}
}
fn hunk_status(hunk: &DiffHunk<MultiBufferRow>) -> DiffHunkStatus {
fn hunk_status(hunk: &MultiBufferDiffHunk) -> DiffHunkStatus {
if hunk.diff_base_byte_range.is_empty() {
DiffHunkStatus::Added
} else if hunk.associated_range.is_empty() {
} else if hunk.row_range.is_empty() {
DiffHunkStatus::Removed
} else {
DiffHunkStatus::Modified
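// Illustrative (not part of this commit): an empty diff-base range means there
// was no old text, so the hunk is an addition; an empty row range means there
// are no new rows, so it is a removal; otherwise old text was replaced:
//
//     diff_base_byte_range: 0..0, row_range: 2..4  => DiffHunkStatus::Added
//     diff_base_byte_range: 0..9, row_range: 2..2  => DiffHunkStatus::Removed
//     diff_base_byte_range: 0..9, row_range: 2..4  => DiffHunkStatus::Modified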

View File

@ -2824,7 +2824,7 @@ fn test_indent_outdent_with_excerpts(cx: &mut TestAppContext) {
Buffer::local("const c: usize = 3;\n", cx).with_language(rust_language, cx)
});
let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, ReadWrite);
let mut multibuffer = MultiBuffer::new(ReadWrite);
multibuffer.push_excerpts(
toml_buffer.clone(),
[ExcerptRange {
@ -4251,6 +4251,78 @@ async fn test_rewrap(cx: &mut TestAppContext) {
cx.update_editor(|e, cx| e.rewrap(&Rewrap, cx));
cx.assert_editor_state(wrapped_text);
}
// Test rewrapping unaligned comments in a selection.
{
let language = Arc::new(Language::new(
LanguageConfig {
line_comments: vec!["// ".into(), "/// ".into()],
..LanguageConfig::default()
},
Some(tree_sitter_rust::LANGUAGE.into()),
));
cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx));
let unwrapped_text = indoc! {"
fn foo() {
if true {
« // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus mollis elit purus, a ornare lacus gravida vitae.
// Praesent semper egestas tellus id dignissim.ˇ»
do_something();
} else {
//
}
}
"};
let wrapped_text = indoc! {"
fn foo() {
if true {
// Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus
// mollis elit purus, a ornare lacus gravida vitae. Praesent semper
// egestas tellus id dignissim.ˇ
do_something();
} else {
//
}
}
"};
cx.set_state(unwrapped_text);
cx.update_editor(|e, cx| e.rewrap(&Rewrap, cx));
cx.assert_editor_state(wrapped_text);
let unwrapped_text = indoc! {"
fn foo() {
if true {
«ˇ // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus mollis elit purus, a ornare lacus gravida vitae.
// Praesent semper egestas tellus id dignissim.»
do_something();
} else {
//
}
}
"};
let wrapped_text = indoc! {"
fn foo() {
if true {
// Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus
// mollis elit purus, a ornare lacus gravida vitae. Praesent semper
// egestas tellus id dignissim.ˇ
do_something();
} else {
//
}
}
"};
cx.set_state(unwrapped_text);
cx.update_editor(|e, cx| e.rewrap(&Rewrap, cx));
cx.assert_editor_state(wrapped_text);
}
}
#[gpui::test]
@ -6673,7 +6745,7 @@ async fn test_multibuffer_format_during_save(cx: &mut gpui::TestAppContext) {
.unwrap();
let multi_buffer = cx.new_model(|cx| {
let mut multi_buffer = MultiBuffer::new(0, ReadWrite);
let mut multi_buffer = MultiBuffer::new(ReadWrite);
multi_buffer.push_excerpts(
buffer_1.clone(),
[
@ -8616,7 +8688,7 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) {
let buffer = cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx));
let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, ReadWrite);
let mut multibuffer = MultiBuffer::new(ReadWrite);
multibuffer.push_excerpts(
buffer.clone(),
[
@ -8700,7 +8772,7 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) {
});
let buffer = cx.new_model(|cx| Buffer::local(initial_text, cx));
let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, ReadWrite);
let mut multibuffer = MultiBuffer::new(ReadWrite);
multibuffer.push_excerpts(buffer, excerpt_ranges, cx);
multibuffer
});
@ -8759,7 +8831,7 @@ fn test_refresh_selections(cx: &mut TestAppContext) {
let buffer = cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx));
let mut excerpt1_id = None;
let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, ReadWrite);
let mut multibuffer = MultiBuffer::new(ReadWrite);
excerpt1_id = multibuffer
.push_excerpts(
buffer.clone(),
@ -8844,7 +8916,7 @@ fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) {
let buffer = cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx));
let mut excerpt1_id = None;
let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, ReadWrite);
let mut multibuffer = MultiBuffer::new(ReadWrite);
excerpt1_id = multibuffer
.push_excerpts(
buffer.clone(),
@ -9232,7 +9304,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) {
let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx);
let leader = pane.update(cx, |_, cx| {
let multibuffer = cx.new_model(|_| MultiBuffer::new(0, ReadWrite));
let multibuffer = cx.new_model(|_| MultiBuffer::new(ReadWrite));
cx.new_view(|cx| build_editor(multibuffer.clone(), cx))
});
@ -10687,7 +10759,7 @@ async fn test_multibuffer_reverts(cx: &mut gpui::TestAppContext) {
diff_every_buffer_row(&buffer_3, sample_text_3.clone(), cols, cx);
let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, ReadWrite);
let mut multibuffer = MultiBuffer::new(ReadWrite);
multibuffer.push_excerpts(
buffer_1.clone(),
[
@ -10827,7 +10899,7 @@ async fn test_mutlibuffer_in_navigation_history(cx: &mut gpui::TestAppContext) {
let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text_3.clone(), cx));
let multi_buffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, ReadWrite);
let mut multibuffer = MultiBuffer::new(ReadWrite);
multibuffer.push_excerpts(
buffer_1.clone(),
[
@ -11766,7 +11838,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
});
let multi_buffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, ReadWrite);
let mut multibuffer = MultiBuffer::new(ReadWrite);
multibuffer.push_excerpts(
buffer_1.clone(),
[

View File

@ -346,6 +346,7 @@ impl EditorElement {
register_action(view, cx, Editor::toggle_code_actions);
register_action(view, cx, Editor::open_excerpts);
register_action(view, cx, Editor::open_excerpts_in_split);
register_action(view, cx, Editor::open_proposed_changes_editor);
register_action(view, cx, Editor::toggle_soft_wrap);
register_action(view, cx, Editor::toggle_tab_bar);
register_action(view, cx, Editor::toggle_line_numbers);
@ -2079,13 +2080,13 @@ impl EditorElement {
.id(("path excerpt header", EntityId::from(block_id)))
.w_full()
.px(header_padding)
.pt(header_padding)
.child(
h_flex()
.flex_basis(Length::Definite(DefiniteLength::Fraction(0.667)))
.id("path header block")
.h(2. * cx.line_height())
.pl(gpui::px(12.))
.pr(gpui::px(8.))
.px(gpui::px(12.))
.rounded_md()
.shadow_md()
.border_1()
@ -3710,11 +3711,11 @@ impl EditorElement {
)
.map(|hunk| {
let start_display_row =
MultiBufferPoint::new(hunk.associated_range.start.0, 0)
MultiBufferPoint::new(hunk.row_range.start.0, 0)
.to_display_point(&snapshot.display_snapshot)
.row();
let mut end_display_row =
MultiBufferPoint::new(hunk.associated_range.end.0, 0)
MultiBufferPoint::new(hunk.row_range.end.0, 0)
.to_display_point(&snapshot.display_snapshot)
.row();
if end_display_row != start_display_row {

View File

@ -2,9 +2,9 @@ pub mod blame;
use std::ops::Range;
use git::diff::{DiffHunk, DiffHunkStatus};
use git::diff::DiffHunkStatus;
use language::Point;
use multi_buffer::{Anchor, MultiBufferRow};
use multi_buffer::{Anchor, MultiBufferDiffHunk};
use crate::{
display_map::{DisplaySnapshot, ToDisplayPoint},
@ -49,25 +49,25 @@ impl DisplayDiffHunk {
}
pub fn diff_hunk_to_display(
hunk: &DiffHunk<MultiBufferRow>,
hunk: &MultiBufferDiffHunk,
snapshot: &DisplaySnapshot,
) -> DisplayDiffHunk {
let hunk_start_point = Point::new(hunk.associated_range.start.0, 0);
let hunk_start_point_sub = Point::new(hunk.associated_range.start.0.saturating_sub(1), 0);
let hunk_start_point = Point::new(hunk.row_range.start.0, 0);
let hunk_start_point_sub = Point::new(hunk.row_range.start.0.saturating_sub(1), 0);
let hunk_end_point_sub = Point::new(
hunk.associated_range
hunk.row_range
.end
.0
.saturating_sub(1)
.max(hunk.associated_range.start.0),
.max(hunk.row_range.start.0),
0,
);
let status = hunk_status(hunk);
let is_removal = status == DiffHunkStatus::Removed;
let folds_start = Point::new(hunk.associated_range.start.0.saturating_sub(2), 0);
let folds_end = Point::new(hunk.associated_range.end.0 + 2, 0);
let folds_start = Point::new(hunk.row_range.start.0.saturating_sub(2), 0);
let folds_end = Point::new(hunk.row_range.end.0 + 2, 0);
let folds_range = folds_start..folds_end;
let containing_fold = snapshot.folds_in_range(folds_range).find(|fold| {
@ -87,7 +87,7 @@ pub fn diff_hunk_to_display(
} else {
let start = hunk_start_point.to_display_point(snapshot).row();
let hunk_end_row = hunk.associated_range.end.max(hunk.associated_range.start);
let hunk_end_row = hunk.row_range.end.max(hunk.row_range.start);
let hunk_end_point = Point::new(hunk_end_row.0, 0);
let multi_buffer_start = snapshot.buffer_snapshot.anchor_after(hunk_start_point);
@ -195,7 +195,7 @@ mod tests {
cx.background_executor.run_until_parked();
let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, ReadWrite);
let mut multibuffer = MultiBuffer::new(ReadWrite);
multibuffer.push_excerpts(
buffer_1.clone(),
[
@ -288,7 +288,7 @@ mod tests {
assert_eq!(
snapshot
.git_diff_hunks_in_range(MultiBufferRow(0)..MultiBufferRow(12))
.map(|hunk| (hunk_status(&hunk), hunk.associated_range))
.map(|hunk| (hunk_status(&hunk), hunk.row_range))
.collect::<Vec<_>>(),
&expected,
);
@ -296,7 +296,7 @@ mod tests {
assert_eq!(
snapshot
.git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(12))
.map(|hunk| (hunk_status(&hunk), hunk.associated_range))
.map(|hunk| (hunk_status(&hunk), hunk.row_range))
.collect::<Vec<_>>(),
expected
.iter()

View File

@ -1205,6 +1205,7 @@ mod tests {
show_type_hints: true,
show_parameter_hints: true,
show_other_hints: true,
show_background: false,
})
});

View File

@ -1337,6 +1337,7 @@ mod tests {
show_type_hints: true,
show_parameter_hints: true,
show_other_hints: true,
show_background: false,
})
});

View File

@ -4,11 +4,12 @@ use std::{
};
use collections::{hash_map, HashMap, HashSet};
use git::diff::{DiffHunk, DiffHunkStatus};
use git::diff::DiffHunkStatus;
use gpui::{Action, AppContext, CursorStyle, Hsla, Model, MouseButton, Subscription, Task, View};
use language::Buffer;
use multi_buffer::{
Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferRow, MultiBufferSnapshot, ToPoint,
Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferDiffHunk, MultiBufferRow,
MultiBufferSnapshot, ToPoint,
};
use settings::SettingsStore;
use text::{BufferId, Point};
@ -190,9 +191,9 @@ impl Editor {
.buffer_snapshot
.git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX)
.filter(|hunk| {
let hunk_display_row_range = Point::new(hunk.associated_range.start.0, 0)
let hunk_display_row_range = Point::new(hunk.row_range.start.0, 0)
.to_display_point(&snapshot.display_snapshot)
..Point::new(hunk.associated_range.end.0, 0)
..Point::new(hunk.row_range.end.0, 0)
.to_display_point(&snapshot.display_snapshot);
let row_range_end =
display_rows_with_expanded_hunks.get(&hunk_display_row_range.start.row());
@ -203,7 +204,7 @@ impl Editor {
fn toggle_hunks_expanded(
&mut self,
hunks_to_toggle: Vec<DiffHunk<MultiBufferRow>>,
hunks_to_toggle: Vec<MultiBufferDiffHunk>,
cx: &mut ViewContext<Self>,
) {
let previous_toggle_task = self.expanded_hunks.hunk_update_tasks.remove(&None);
@ -274,8 +275,8 @@ impl Editor {
});
for remaining_hunk in hunks_to_toggle {
let remaining_hunk_point_range =
Point::new(remaining_hunk.associated_range.start.0, 0)
..Point::new(remaining_hunk.associated_range.end.0, 0);
Point::new(remaining_hunk.row_range.start.0, 0)
..Point::new(remaining_hunk.row_range.end.0, 0);
hunks_to_expand.push(HoveredHunk {
status: hunk_status(&remaining_hunk),
multi_buffer_range: remaining_hunk_point_range
@ -705,7 +706,7 @@ impl Editor {
fn to_diff_hunk(
hovered_hunk: &HoveredHunk,
multi_buffer_snapshot: &MultiBufferSnapshot,
) -> Option<DiffHunk<MultiBufferRow>> {
) -> Option<MultiBufferDiffHunk> {
let buffer_id = hovered_hunk
.multi_buffer_range
.start
@ -716,9 +717,8 @@ fn to_diff_hunk(
let point_range = hovered_hunk
.multi_buffer_range
.to_point(multi_buffer_snapshot);
Some(DiffHunk {
associated_range: MultiBufferRow(point_range.start.row)
..MultiBufferRow(point_range.end.row),
Some(MultiBufferDiffHunk {
row_range: MultiBufferRow(point_range.start.row)..MultiBufferRow(point_range.end.row),
buffer_id,
buffer_range,
diff_base_byte_range: hovered_hunk.diff_base_byte_range.clone(),
@ -764,7 +764,7 @@ fn editor_with_deleted_text(
let parent_editor = cx.view().downgrade();
let editor = cx.new_view(|cx| {
let multi_buffer =
cx.new_model(|_| MultiBuffer::without_headers(0, language::Capability::ReadOnly));
cx.new_model(|_| MultiBuffer::without_headers(language::Capability::ReadOnly));
multi_buffer.update(cx, |multi_buffer, cx| {
multi_buffer.push_excerpts(
diff_base_buffer,
@ -868,7 +868,7 @@ fn editor_with_deleted_text(
fn buffer_diff_hunk(
buffer_snapshot: &MultiBufferSnapshot,
row_range: Range<Point>,
) -> Option<DiffHunk<MultiBufferRow>> {
) -> Option<MultiBufferDiffHunk> {
let mut hunks = buffer_snapshot.git_diff_hunks_in_range(
MultiBufferRow(row_range.start.row)..MultiBufferRow(row_range.end.row),
);

View File

@ -1296,6 +1296,7 @@ pub mod tests {
show_type_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Type)),
show_parameter_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Parameter)),
show_other_hints: allowed_hint_kinds.contains(&None),
show_background: false,
})
});
@ -1428,6 +1429,7 @@ pub mod tests {
show_type_hints: true,
show_parameter_hints: true,
show_other_hints: true,
show_background: false,
})
});
@ -1547,6 +1549,7 @@ pub mod tests {
show_type_hints: true,
show_parameter_hints: true,
show_other_hints: true,
show_background: false,
})
});
@ -1777,6 +1780,7 @@ pub mod tests {
show_type_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Type)),
show_parameter_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Parameter)),
show_other_hints: allowed_hint_kinds.contains(&None),
show_background: false,
})
});
@ -1941,6 +1945,7 @@ pub mod tests {
show_parameter_hints: new_allowed_hint_kinds
.contains(&Some(InlayHintKind::Parameter)),
show_other_hints: new_allowed_hint_kinds.contains(&None),
show_background: false,
})
});
cx.executor().run_until_parked();
@ -1987,6 +1992,7 @@ pub mod tests {
show_parameter_hints: another_allowed_hint_kinds
.contains(&Some(InlayHintKind::Parameter)),
show_other_hints: another_allowed_hint_kinds.contains(&None),
show_background: false,
})
});
cx.executor().run_until_parked();
@ -2047,6 +2053,7 @@ pub mod tests {
show_parameter_hints: final_allowed_hint_kinds
.contains(&Some(InlayHintKind::Parameter)),
show_other_hints: final_allowed_hint_kinds.contains(&None),
show_background: false,
})
});
cx.executor().run_until_parked();
@ -2122,6 +2129,7 @@ pub mod tests {
show_type_hints: true,
show_parameter_hints: true,
show_other_hints: true,
show_background: false,
})
});
@ -2256,6 +2264,7 @@ pub mod tests {
show_type_hints: true,
show_parameter_hints: true,
show_other_hints: true,
show_background: false,
})
});
@ -2551,6 +2560,7 @@ pub mod tests {
show_type_hints: true,
show_parameter_hints: true,
show_other_hints: true,
show_background: false,
})
});
@ -2597,7 +2607,7 @@ pub mod tests {
.await
.unwrap();
let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite);
let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
multibuffer.push_excerpts(
buffer_1.clone(),
[
@ -2902,6 +2912,7 @@ pub mod tests {
show_type_hints: false,
show_parameter_hints: false,
show_other_hints: false,
show_background: false,
})
});
@ -2946,7 +2957,7 @@ pub mod tests {
})
.await
.unwrap();
let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite));
let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite));
let (buffer_1_excerpts, buffer_2_excerpts) = multibuffer.update(cx, |multibuffer, cx| {
let buffer_1_excerpts = multibuffer.push_excerpts(
buffer_1.clone(),
@ -3096,6 +3107,7 @@ pub mod tests {
show_type_hints: true,
show_parameter_hints: true,
show_other_hints: true,
show_background: false,
})
});
cx.executor().run_until_parked();
@ -3131,6 +3143,7 @@ pub mod tests {
show_type_hints: true,
show_parameter_hints: true,
show_other_hints: true,
show_background: false,
})
});
@ -3225,6 +3238,7 @@ pub mod tests {
show_type_hints: true,
show_parameter_hints: true,
show_other_hints: true,
show_background: false,
})
});
@ -3305,6 +3319,7 @@ pub mod tests {
show_type_hints: true,
show_parameter_hints: true,
show_other_hints: true,
show_background: false,
})
});
cx.executor().run_until_parked();

View File

@ -68,7 +68,6 @@ impl FollowableItem for Editor {
unreachable!()
};
let replica_id = project.read(cx).replica_id();
let buffer_ids = state
.excerpts
.iter()
@ -92,7 +91,7 @@ impl FollowableItem for Editor {
if state.singleton && buffers.len() == 1 {
multibuffer = MultiBuffer::singleton(buffers.pop().unwrap(), cx)
} else {
multibuffer = MultiBuffer::new(replica_id, project.read(cx).capability());
multibuffer = MultiBuffer::new(project.read(cx).capability());
let mut excerpts = state.excerpts.into_iter().peekable();
while let Some(excerpt) = excerpts.peek() {
let Ok(buffer_id) = BufferId::new(excerpt.buffer_id) else {
@ -1087,10 +1086,14 @@ impl SerializableItem for Editor {
let workspace_id = workspace.database_id()?;
let buffer = self.buffer().read(cx).as_singleton()?;
let path = buffer
.read(cx)
.file()
.map(|file| file.full_path(cx))
.and_then(|full_path| project.read(cx).find_project_path(&full_path, cx))
.and_then(|project_path| project.read(cx).absolute_path(&project_path, cx));
let is_dirty = buffer.read(cx).is_dirty();
let local_file = buffer.read(cx).file().and_then(|file| file.as_local());
let path = local_file.map(|file| file.abs_path(cx));
let mtime = buffer.read(cx).saved_mtime();
let snapshot = buffer.read(cx).snapshot();

View File

@ -928,7 +928,7 @@ mod tests {
let buffer = cx.new_model(|cx| Buffer::local("abc\ndefg\nhijkl\nmn", cx));
let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite);
let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
multibuffer.push_excerpts(
buffer.clone(),
[

View File

@ -0,0 +1,125 @@
use crate::{Editor, EditorEvent};
use collections::HashSet;
use futures::{channel::mpsc, future::join_all};
use gpui::{AppContext, EventEmitter, FocusableView, Model, Render, Subscription, Task, View};
use language::{Buffer, BufferEvent, Capability};
use multi_buffer::{ExcerptRange, MultiBuffer};
use project::Project;
use smol::stream::StreamExt;
use std::{ops::Range, time::Duration};
use text::ToOffset;
use ui::prelude::*;
use workspace::Item;
pub struct ProposedChangesEditor {
editor: View<Editor>,
_subscriptions: Vec<Subscription>,
_recalculate_diffs_task: Task<Option<()>>,
recalculate_diffs_tx: mpsc::UnboundedSender<Model<Buffer>>,
}
pub struct ProposedChangesBuffer<T> {
pub buffer: Model<Buffer>,
pub ranges: Vec<Range<T>>,
}
impl ProposedChangesEditor {
pub fn new<T: ToOffset>(
buffers: Vec<ProposedChangesBuffer<T>>,
project: Option<Model<Project>>,
cx: &mut ViewContext<Self>,
) -> Self {
let mut subscriptions = Vec::new();
let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite));
for buffer in buffers {
let branch_buffer = buffer.buffer.update(cx, |buffer, cx| buffer.branch(cx));
subscriptions.push(cx.subscribe(&branch_buffer, Self::on_buffer_event));
multibuffer.update(cx, |multibuffer, cx| {
multibuffer.push_excerpts(
branch_buffer,
buffer.ranges.into_iter().map(|range| ExcerptRange {
context: range,
primary: None,
}),
cx,
);
});
}
let (recalculate_diffs_tx, mut recalculate_diffs_rx) = mpsc::unbounded();
Self {
editor: cx
.new_view(|cx| Editor::for_multibuffer(multibuffer.clone(), project, true, cx)),
recalculate_diffs_tx,
_recalculate_diffs_task: cx.spawn(|_, mut cx| async move {
let mut buffers_to_diff = HashSet::default();
while let Some(buffer) = recalculate_diffs_rx.next().await {
buffers_to_diff.insert(buffer);
loop {
cx.background_executor()
.timer(Duration::from_millis(250))
.await;
let mut had_further_changes = false;
while let Ok(next_buffer) = recalculate_diffs_rx.try_next() {
buffers_to_diff.insert(next_buffer?);
had_further_changes = true;
}
if !had_further_changes {
break;
}
}
join_all(buffers_to_diff.drain().filter_map(|buffer| {
buffer
.update(&mut cx, |buffer, cx| buffer.recalculate_diff(cx))
.ok()?
}))
.await;
}
None
}),
_subscriptions: subscriptions,
}
}
fn on_buffer_event(
&mut self,
buffer: Model<Buffer>,
event: &BufferEvent,
_cx: &mut ViewContext<Self>,
) {
if let BufferEvent::Edited = event {
self.recalculate_diffs_tx.unbounded_send(buffer).ok();
}
}
}
impl Render for ProposedChangesEditor {
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
self.editor.clone()
}
}
impl FocusableView for ProposedChangesEditor {
fn focus_handle(&self, cx: &AppContext) -> gpui::FocusHandle {
self.editor.focus_handle(cx)
}
}
impl EventEmitter<EditorEvent> for ProposedChangesEditor {}
impl Item for ProposedChangesEditor {
type Event = EditorEvent;
fn tab_icon(&self, _cx: &ui::WindowContext) -> Option<Icon> {
Some(Icon::new(IconName::Pencil))
}
fn tab_content_text(&self, _cx: &WindowContext) -> Option<SharedString> {
Some("Proposed changes".into())
}
}
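// A minimal, self-contained sketch (not part of this commit) of the debounce
// pattern used by ProposedChangesEditor above: drain the channel until 250ms
// pass with no new messages, then process the accumulated set. Buffer models
// are stood in for by plain u32 ids; `futures` and `smol` are assumed as deps.
use futures::{channel::mpsc, StreamExt};
use std::{collections::HashSet, time::Duration};

fn main() {
    let (tx, mut rx) = mpsc::unbounded::<u32>();
    for id in [1, 2, 2, 3] {
        tx.unbounded_send(id).ok();
    }
    drop(tx);
    smol::block_on(async move {
        let mut pending = HashSet::new();
        while let Some(first) = rx.next().await {
            pending.insert(first);
            loop {
                smol::Timer::after(Duration::from_millis(250)).await;
                let mut had_further_changes = false;
                while let Ok(Some(next)) = rx.try_next() {
                    pending.insert(next);
                    had_further_changes = true;
                }
                if !had_further_changes {
                    break;
                }
            }
            // In the editor this is where recalculate_diff runs for each buffer.
            println!("recalculating diffs for {:?}", pending.drain().collect::<Vec<_>>());
        }
    });
}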

View File

@ -108,16 +108,16 @@ pub fn editor_hunks(
.buffer_snapshot
.git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX)
.map(|hunk| {
let display_range = Point::new(hunk.associated_range.start.0, 0)
let display_range = Point::new(hunk.row_range.start.0, 0)
.to_display_point(snapshot)
.row()
..Point::new(hunk.associated_range.end.0, 0)
..Point::new(hunk.row_range.end.0, 0)
.to_display_point(snapshot)
.row();
let (_, buffer, _) = editor
.buffer()
.read(cx)
.excerpt_containing(Point::new(hunk.associated_range.start.0, 0), cx)
.excerpt_containing(Point::new(hunk.row_range.start.0, 0), cx)
.expect("no excerpt for expanded buffer's hunk start");
let diff_base = buffer
.read(cx)

View File

@ -75,7 +75,7 @@ impl EditorTestContext {
cx: &mut gpui::TestAppContext,
excerpts: [&str; COUNT],
) -> EditorTestContext {
let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite);
let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite);
let buffer = cx.new_model(|cx| {
for excerpt in excerpts.into_iter() {
let (text, ranges) = marked_text_ranges(excerpt, false);

View File

@ -24,6 +24,7 @@ feature_flags.workspace = true
fs.workspace = true
git.workspace = true
gpui.workspace = true
isahc_http_client.workspace = true
language.workspace = true
languages.workspace = true
http_client.workspace = true

View File

@ -12,13 +12,16 @@ use language::LanguageRegistry;
use node_runtime::FakeNodeRuntime;
use open_ai::OpenAiEmbeddingModel;
use project::Project;
use semantic_index::{OpenAiEmbeddingProvider, ProjectIndex, SemanticDb, Status};
use semantic_index::{
EmbeddingProvider, OpenAiEmbeddingProvider, ProjectIndex, SemanticDb, Status,
};
use serde::{Deserialize, Serialize};
use settings::SettingsStore;
use smol::channel::bounded;
use smol::io::AsyncReadExt;
use smol::Timer;
use std::ops::RangeInclusive;
use std::path::PathBuf;
use std::time::Duration;
use std::{
fs,
@ -97,13 +100,14 @@ fn main() -> Result<()> {
gpui::App::headless().run(move |cx| {
let executor = cx.background_executor().clone();
let client = isahc_http_client::IsahcHttpClient::new(None, None);
cx.set_http_client(client.clone());
match cli.command {
Commands::Fetch {} => {
executor
.clone()
.spawn(async move {
if let Err(err) = fetch_evaluation_resources(&executor).await {
if let Err(err) = fetch_evaluation_resources(client, &executor).await {
eprintln!("Error: {}", err);
exit(1);
}
@ -127,10 +131,12 @@ fn main() -> Result<()> {
Ok(())
}
async fn fetch_evaluation_resources(executor: &BackgroundExecutor) -> Result<()> {
let http_client = http_client::HttpClientWithProxy::new(None, None);
fetch_code_search_net_resources(&http_client).await?;
fetch_eval_repos(executor, &http_client).await?;
async fn fetch_evaluation_resources(
http_client: Arc<dyn HttpClient>,
executor: &BackgroundExecutor,
) -> Result<()> {
fetch_code_search_net_resources(&*http_client).await?;
fetch_eval_repos(executor, &*http_client).await?;
Ok(())
}
@ -234,11 +240,20 @@ async fn fetch_code_search_net_resources(http_client: &dyn HttpClient) -> Result
Ok(())
}
#[derive(Default, Debug)]
struct Counts {
covered_results: usize,
overlapped_results: usize,
covered_files: usize,
total_results: usize,
}
async fn run_evaluation(
only_repo: Option<String>,
executor: &BackgroundExecutor,
cx: &mut AsyncAppContext,
) -> Result<()> {
let mut http_client = None;
cx.update(|cx| {
let mut store = SettingsStore::new(cx);
store
@ -248,15 +263,15 @@ async fn run_evaluation(
client::init_settings(cx);
language::init(cx);
Project::init_settings(cx);
http_client = Some(cx.http_client());
cx.update_flags(false, vec![]);
})
.unwrap();
let http_client = http_client.unwrap();
let dataset_dir = Path::new(CODESEARCH_NET_DIR);
let evaluations_path = dataset_dir.join("evaluations.json");
let repos_dir = Path::new(EVAL_REPOS_DIR);
let db_path = Path::new(EVAL_DB_PATH);
let http_client = http_client::HttpClientWithProxy::new(None, None);
let api_key = std::env::var("OPENAI_API_KEY").unwrap();
let git_hosting_provider_registry = Arc::new(GitHostingProviderRegistry::new());
let fs = Arc::new(RealFs::new(git_hosting_provider_registry, None)) as Arc<dyn Fs>;
@ -266,9 +281,9 @@ async fn run_evaluation(
Client::new(
clock,
Arc::new(http_client::HttpClientWithUrl::new(
http_client.clone(),
"https://zed.dev",
None,
None,
)),
cx,
)
@ -293,12 +308,11 @@ async fn run_evaluation(
cx.update(|cx| languages::init(language_registry.clone(), node_runtime.clone(), cx))
.unwrap();
let mut covered_result_count = 0;
let mut overlapped_result_count = 0;
let mut covered_file_count = 0;
let mut total_result_count = 0;
let mut counts = Counts::default();
eprint!("Running evals.");
let mut failures = Vec::new();
for evaluation_project in evaluations {
if only_repo
.as_ref()
@ -310,27 +324,24 @@ async fn run_evaluation(
eprint!("\r\x1B[2K");
eprint!(
"Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured. Project: {}...",
covered_result_count,
total_result_count,
overlapped_result_count,
total_result_count,
covered_file_count,
total_result_count,
counts.covered_results,
counts.total_results,
counts.overlapped_results,
counts.total_results,
counts.covered_files,
counts.total_results,
evaluation_project.repo
);
let repo_db_path =
db_path.join(format!("{}.db", evaluation_project.repo.replace('/', "_")));
let mut semantic_index = SemanticDb::new(repo_db_path, embedding_provider.clone(), cx)
.await
.unwrap();
let repo_dir = repos_dir.join(&evaluation_project.repo);
if !repo_dir.exists() || repo_dir.join(SKIP_EVAL_PATH).exists() {
eprintln!("Skipping {}: directory not found", evaluation_project.repo);
continue;
}
let repo_db_path =
db_path.join(format!("{}.db", evaluation_project.repo.replace('/', "_")));
let project = cx
.update(|cx| {
Project::local(
@ -345,116 +356,193 @@ async fn run_evaluation(
})
.unwrap();
let (worktree, _) = project
.update(cx, |project, cx| {
project.find_or_create_worktree(repo_dir, true, cx)
})?
.await?;
let repo = evaluation_project.repo.clone();
if let Err(err) = run_eval_project(
evaluation_project,
&user_store,
repo_db_path,
&repo_dir,
&mut counts,
project,
embedding_provider.clone(),
fs.clone(),
cx,
)
.await
{
eprintln!("{repo} eval failed with error: {:?}", err);
worktree
.update(cx, |worktree, _| {
worktree.as_local().unwrap().scan_complete()
})
.unwrap()
.await;
let project_index = cx
.update(|cx| semantic_index.create_project_index(project.clone(), cx))
.unwrap();
wait_for_indexing_complete(&project_index, cx, Some(Duration::from_secs(120))).await;
for query in evaluation_project.queries {
let results = cx
.update(|cx| {
let project_index = project_index.read(cx);
project_index.search(query.query.clone(), SEARCH_RESULT_LIMIT, cx)
})
.unwrap()
.await
.unwrap();
let results = SemanticDb::load_results(results, &fs.clone(), &cx)
.await
.unwrap();
let mut project_covered_result_count = 0;
let mut project_overlapped_result_count = 0;
let mut project_covered_file_count = 0;
let mut covered_result_indices = Vec::new();
for expected_result in &query.expected_results {
let mut file_matched = false;
let mut range_overlapped = false;
let mut range_covered = false;
for (ix, result) in results.iter().enumerate() {
if result.path.as_ref() == Path::new(&expected_result.file) {
file_matched = true;
let start_matched =
result.row_range.contains(&expected_result.lines.start());
let end_matched = result.row_range.contains(&expected_result.lines.end());
if start_matched || end_matched {
range_overlapped = true;
}
if start_matched && end_matched {
range_covered = true;
covered_result_indices.push(ix);
break;
}
}
}
if range_covered {
project_covered_result_count += 1
};
if range_overlapped {
project_overlapped_result_count += 1
};
if file_matched {
project_covered_file_count += 1
};
}
let outcome_repo = evaluation_project.repo.clone();
let query_results = EvaluationQueryOutcome {
repo: outcome_repo,
query: query.query,
total_result_count: query.expected_results.len(),
covered_result_count: project_covered_result_count,
overlapped_result_count: project_overlapped_result_count,
covered_file_count: project_covered_file_count,
expected_results: query.expected_results,
actual_results: results
.iter()
.map(|result| EvaluationSearchResult {
file: result.path.to_string_lossy().to_string(),
lines: result.row_range.clone(),
})
.collect(),
covered_result_indices,
};
overlapped_result_count += query_results.overlapped_result_count;
covered_result_count += query_results.covered_result_count;
covered_file_count += query_results.covered_file_count;
total_result_count += query_results.total_result_count;
println!("{}", serde_json::to_string(&query_results).unwrap());
failures.push((repo, err));
}
}
eprint!(
"Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured.",
covered_result_count,
total_result_count,
overlapped_result_count,
total_result_count,
covered_file_count,
total_result_count,
eprintln!(
"Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured. {} failed.",
counts.covered_results,
counts.total_results,
counts.overlapped_results,
counts.total_results,
counts.covered_files,
counts.total_results,
failures.len(),
);
Ok(())
if failures.is_empty() {
Ok(())
} else {
eprintln!("Failures:\n");
for (index, (repo, failure)) in failures.iter().enumerate() {
eprintln!("Failure #{} - {repo}\n{:?}", index + 1, failure);
}
Err(anyhow::anyhow!("Some evals failed."))
}
}
#[allow(clippy::too_many_arguments)]
async fn run_eval_project(
evaluation_project: EvaluationProject,
user_store: &Model<UserStore>,
repo_db_path: PathBuf,
repo_dir: &Path,
counts: &mut Counts,
project: Model<Project>,
embedding_provider: Arc<dyn EmbeddingProvider>,
fs: Arc<dyn Fs>,
cx: &mut AsyncAppContext,
) -> Result<(), anyhow::Error> {
let mut semantic_index = SemanticDb::new(repo_db_path, embedding_provider, cx).await?;
let (worktree, _) = project
.update(cx, |project, cx| {
project.find_or_create_worktree(repo_dir, true, cx)
})?
.await?;
worktree
.update(cx, |worktree, _| {
worktree.as_local().unwrap().scan_complete()
})?
.await;
let project_index = cx.update(|cx| semantic_index.create_project_index(project.clone(), cx))?;
wait_for_indexing_complete(&project_index, cx, Some(Duration::from_secs(120))).await;
for query in evaluation_project.queries {
let results = {
// Retry search up to 3 times in case of timeout, network failure, etc.
let mut retries_remaining = 3;
let mut result;
loop {
match cx.update(|cx| {
let project_index = project_index.read(cx);
project_index.search(vec![query.query.clone()], SEARCH_RESULT_LIMIT, cx)
}) {
Ok(task) => match task.await {
Ok(answer) => {
result = Ok(answer);
break;
}
Err(err) => {
result = Err(err);
}
},
Err(err) => {
result = Err(err);
}
}
if retries_remaining > 0 {
eprintln!(
"Retrying search after it failed on query {:?} with {:?}",
query, result
);
retries_remaining -= 1;
} else {
eprintln!(
"Ran out of retries; giving up on search which failed on query {:?} with {:?}",
query, result
);
break;
}
}
SemanticDb::load_results(result?, &fs.clone(), &cx).await?
};
let mut project_covered_result_count = 0;
let mut project_overlapped_result_count = 0;
let mut project_covered_file_count = 0;
let mut covered_result_indices = Vec::new();
for expected_result in &query.expected_results {
let mut file_matched = false;
let mut range_overlapped = false;
let mut range_covered = false;
for (ix, result) in results.iter().enumerate() {
if result.path.as_ref() == Path::new(&expected_result.file) {
file_matched = true;
let start_matched = result.row_range.contains(&expected_result.lines.start());
let end_matched = result.row_range.contains(&expected_result.lines.end());
if start_matched || end_matched {
range_overlapped = true;
}
if start_matched && end_matched {
range_covered = true;
covered_result_indices.push(ix);
break;
}
}
}
if range_covered {
project_covered_result_count += 1
};
if range_overlapped {
project_overlapped_result_count += 1
};
if file_matched {
project_covered_file_count += 1
};
}
let outcome_repo = evaluation_project.repo.clone();
let query_results = EvaluationQueryOutcome {
repo: outcome_repo,
query: query.query,
total_result_count: query.expected_results.len(),
covered_result_count: project_covered_result_count,
overlapped_result_count: project_overlapped_result_count,
covered_file_count: project_covered_file_count,
expected_results: query.expected_results,
actual_results: results
.iter()
.map(|result| EvaluationSearchResult {
file: result.path.to_string_lossy().to_string(),
lines: result.row_range.clone(),
})
.collect(),
covered_result_indices,
};
counts.overlapped_results += query_results.overlapped_result_count;
counts.covered_results += query_results.covered_result_count;
counts.covered_files += query_results.covered_file_count;
counts.total_results += query_results.total_result_count;
println!("{}", serde_json::to_string(&query_results)?);
}
user_store.update(cx, |_, _| {
drop(semantic_index);
drop(project);
drop(worktree);
drop(project_index);
})
}
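// A hedged, generic sketch (not part of this commit) of the retry shape used
// in run_eval_project above: run an async operation up to `1 + retries` times,
// returning the first success or the last error.
async fn with_retries<T, E, F, Fut>(mut retries: u32, mut op: F) -> Result<T, E>
where
    E: std::fmt::Debug,
    F: FnMut() -> Fut,
    Fut: std::future::Future<Output = Result<T, E>>,
{
    loop {
        match op().await {
            Ok(value) => return Ok(value),
            Err(err) if retries > 0 => {
                retries -= 1;
                eprintln!("Retrying after failure: {:?}", err);
            }
            Err(err) => return Err(err),
        }
    }
}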
async fn wait_for_indexing_complete(
@ -511,7 +599,7 @@ async fn fetch_eval_repos(
let evaluations = fs::read(&evaluations_path).expect("failed to read evaluations.json");
let evaluations: Vec<EvaluationProject> = serde_json::from_slice(&evaluations).unwrap();
eprint!("Fetching evaluation repositories...");
eprintln!("Fetching evaluation repositories...");
executor
.scoped(move |scope| {

View File

@ -57,6 +57,7 @@ task.workspace = true
serde_json_lenient.workspace = true
[dev-dependencies]
isahc_http_client.workspace = true
ctor.workspace = true
env_logger.workspace = true
parking_lot.workspace = true

View File

@ -246,6 +246,7 @@ impl ExtensionBuilder {
.args(scanner_path.exists().then_some(scanner_path))
.output()
.context("failed to run clang")?;
if !clang_output.status.success() {
bail!(
"failed to compile {} parser with clang: {}",
@ -431,6 +432,7 @@ impl ExtensionBuilder {
let body = BufReader::new(response.body_mut());
let body = GzipDecoder::new(body);
let tar = Archive::new(body);
tar.unpack(&tar_out_dir)
.await
.context("failed to unpack wasi-sdk archive")?;

View File

@ -38,7 +38,7 @@ impl LspAdapter for ExtensionLspAdapter {
fn get_language_server_command<'a>(
self: Arc<Self>,
_: Arc<Path>,
_: Option<Arc<Path>>,
delegate: Arc<dyn LspAdapterDelegate>,
_: futures::lock::MutexGuard<'a, Option<LanguageServerBinary>>,
_: &'a mut AsyncAppContext,

View File

@ -190,6 +190,7 @@ pub fn init(
None,
fs,
client.http_client().clone(),
client.http_client().clone(),
Some(client.telemetry().clone()),
node_runtime,
language_registry,
@ -225,6 +226,7 @@ impl ExtensionStore {
build_dir: Option<PathBuf>,
fs: Arc<dyn Fs>,
http_client: Arc<HttpClientWithUrl>,
builder_client: Arc<dyn HttpClient>,
telemetry: Option<Arc<Telemetry>>,
node_runtime: Arc<dyn NodeRuntime>,
language_registry: Arc<LanguageRegistry>,
@ -244,12 +246,7 @@ impl ExtensionStore {
extension_index: Default::default(),
installed_dir,
index_path,
builder: Arc::new(ExtensionBuilder::new(
// Construct a real HTTP client for the extension builder, as we
// don't want to use a fake one in the tests.
::http_client::client(None, http_client.proxy().cloned()),
build_dir,
)),
builder: Arc::new(ExtensionBuilder::new(builder_client, build_dir)),
outstanding_operations: Default::default(),
modified_extensions: Default::default(),
reload_complete_senders: Vec::new(),

View File

@ -13,10 +13,12 @@ use futures::{io::BufReader, AsyncReadExt, StreamExt};
use gpui::{Context, SemanticVersion, TestAppContext};
use http_client::{FakeHttpClient, Response};
use indexed_docs::IndexedDocsRegistry;
use isahc_http_client::IsahcHttpClient;
use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName};
use node_runtime::FakeNodeRuntime;
use parking_lot::Mutex;
use project::{Project, DEFAULT_COMPLETION_CONTEXT};
use release_channel::AppVersion;
use serde_json::json;
use settings::{Settings as _, SettingsStore};
use snippet_provider::SnippetRegistry;
@ -270,6 +272,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
None,
fs.clone(),
http_client.clone(),
http_client.clone(),
None,
node_runtime.clone(),
language_registry.clone(),
@ -397,6 +400,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
None,
fs.clone(),
http_client.clone(),
http_client.clone(),
None,
node_runtime.clone(),
language_registry.clone(),
@ -502,7 +506,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
http_request_count: 0,
}));
let http_client = FakeHttpClient::create({
let extension_client = FakeHttpClient::create({
let language_server_version = language_server_version.clone();
move |request| {
let language_server_version = language_server_version.clone();
@ -564,13 +568,23 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
}
}
});
let user_agent = cx.update(|cx| {
format!(
"Zed/{} ({}; {})",
AppVersion::global(cx),
std::env::consts::OS,
std::env::consts::ARCH
)
});
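// Illustrative (not part of this commit): this renders like
// "Zed/0.150.0 (macos; aarch64)" (hypothetical version and platform values).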
let builder_client = IsahcHttpClient::new(None, Some(user_agent));
let extension_store = cx.new_model(|cx| {
ExtensionStore::new(
extensions_dir.clone(),
Some(cache_dir),
fs.clone(),
http_client.clone(),
extension_client.clone(),
builder_client,
None,
node_runtime,
language_registry.clone(),

View File

@ -9,10 +9,10 @@ use futures::{io::BufReader, FutureExt as _};
use futures::{lock::Mutex, AsyncReadExt};
use indexed_docs::IndexedDocsDatabase;
use isahc::config::{Configurable, RedirectPolicy};
use language::LanguageName;
use language::{
language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate,
};
use language::{LanguageName, LanguageServerName};
use project::project_settings::ProjectSettings;
use semantic_version::SemanticVersion;
use std::{
@ -366,7 +366,7 @@ impl ExtensionImports for WasmState {
.and_then(|key| {
ProjectSettings::get(location, cx)
.lsp
.get(&Arc::<str>::from(key))
.get(&LanguageServerName(key.into()))
})
.cloned()
.unwrap_or_default();

View File

@ -9,10 +9,10 @@ use futures::{io::BufReader, FutureExt as _};
use futures::{lock::Mutex, AsyncReadExt};
use indexed_docs::IndexedDocsDatabase;
use isahc::config::{Configurable, RedirectPolicy};
use language::LanguageName;
use language::{
language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate,
};
use language::{LanguageName, LanguageServerName};
use project::project_settings::ProjectSettings;
use semantic_version::SemanticVersion;
use std::{
@ -412,7 +412,7 @@ impl ExtensionImports for WasmState {
.and_then(|key| {
ProjectSettings::get(location, cx)
.lsp
.get(&Arc::<str>::from(key))
.get(&LanguageServerName::from_proto(key))
})
.cloned()
.unwrap_or_default();

View File

@ -18,7 +18,7 @@ clap = { workspace = true, features = ["derive"] }
env_logger.workspace = true
extension = { workspace = true, features = ["no-webrtc"] }
fs.workspace = true
http_client.workspace = true
isahc_http_client.workspace = true
language.workspace = true
log.workspace = true
rpc.workspace = true

View File

@ -7,13 +7,13 @@ use std::{
};
use ::fs::{copy_recursive, CopyOptions, Fs, RealFs};
use ::http_client::HttpClientWithProxy;
use anyhow::{anyhow, bail, Context, Result};
use clap::Parser;
use extension::{
extension_builder::{CompileExtensionOptions, ExtensionBuilder},
ExtensionManifest,
};
use isahc_http_client::IsahcHttpClient;
use language::LanguageConfig;
use theme::ThemeRegistry;
use tree_sitter::{Language, Query, WasmStore};
@ -66,7 +66,13 @@ async fn main() -> Result<()> {
std::env::consts::OS,
std::env::consts::ARCH
);
let http_client = Arc::new(HttpClientWithProxy::new(Some(user_agent), None));
let http_client = Arc::new(
IsahcHttpClient::builder()
.default_header("User-Agent", user_agent)
.build()
.map(IsahcHttpClient::from)?,
);
let builder = ExtensionBuilder::new(http_client, scratch_dir);
builder
.compile_extension(

View File

@ -44,8 +44,8 @@ const FEEDBACK_SUBMISSION_ERROR_TEXT: &str =
struct FeedbackRequestBody<'a> {
feedback_text: &'a str,
email: Option<String>,
metrics_id: Option<Arc<str>>,
installation_id: Option<Arc<str>>,
metrics_id: Option<Arc<str>>,
system_specs: SystemSpecs,
is_staff: bool,
}
@ -296,16 +296,16 @@ impl FeedbackModal {
}
let telemetry = zed_client.telemetry();
let metrics_id = telemetry.metrics_id();
let installation_id = telemetry.installation_id();
let metrics_id = telemetry.metrics_id();
let is_staff = telemetry.is_staff();
let http_client = zed_client.http_client();
let feedback_endpoint = http_client.build_url("/api/feedback");
let request = FeedbackRequestBody {
feedback_text,
email,
metrics_id,
installation_id,
metrics_id,
system_specs,
is_staff: is_staff.unwrap_or(false),
};

View File

@ -16,14 +16,17 @@ doctest = false
anyhow.workspace = true
collections.workspace = true
editor.workspace = true
file_icons.workspace = true
futures.workspace = true
fuzzy.workspace = true
gpui.workspace = true
menu.workspace = true
picker.workspace = true
project.workspace = true
schemars.workspace = true
settings.workspace = true
serde.workspace = true
serde_derive.workspace = true
text.workspace = true
theme.workspace = true
ui.workspace = true

View File

@ -1,11 +1,14 @@
#[cfg(test)]
mod file_finder_tests;
mod file_finder_settings;
mod new_path_prompt;
mod open_path_prompt;
use collections::HashMap;
use editor::{scroll::Autoscroll, Bias, Editor};
use file_finder_settings::FileFinderSettings;
use file_icons::FileIcons;
use fuzzy::{CharBag, PathMatch, PathMatchCandidate};
use gpui::{
actions, rems, Action, AnyElement, AppContext, DismissEvent, EventEmitter, FocusHandle,
@ -28,7 +31,7 @@ use std::{
use text::Point;
use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing};
use util::{paths::PathWithPosition, post_inc, ResultExt};
use workspace::{item::PreviewTabsSettings, ModalView, Workspace};
use workspace::{item::PreviewTabsSettings, notifications::NotifyResultExt, ModalView, Workspace};
actions!(file_finder, [SelectPrev]);
@ -39,7 +42,12 @@ pub struct FileFinder {
init_modifiers: Option<Modifiers>,
}
pub fn init_settings(cx: &mut AppContext) {
FileFinderSettings::register(cx);
}
pub fn init(cx: &mut AppContext) {
init_settings(cx);
cx.observe_new_views(FileFinder::register).detach();
cx.observe_new_views(NewPathPrompt::register).detach();
cx.observe_new_views(OpenPathPrompt::register).detach();
@ -1003,7 +1011,7 @@ impl PickerDelegate for FileFinderDelegate {
let finder = self.file_finder.clone();
cx.spawn(|_, mut cx| async move {
let item = open_task.await.log_err()?;
let item = open_task.await.notify_async_err(&mut cx)?;
if let Some(row) = row {
if let Some(active_editor) = item.downcast::<Editor>() {
active_editor
@ -1041,12 +1049,14 @@ impl PickerDelegate for FileFinderDelegate {
selected: bool,
cx: &mut ViewContext<Picker<Self>>,
) -> Option<Self::ListItem> {
let settings = FileFinderSettings::get_global(cx);
let path_match = self
.matches
.get(ix)
.expect("Invalid matches state: no element for index {ix}");
let icon = match &path_match {
let history_icon = match &path_match {
Match::History { .. } => Icon::new(IconName::HistoryRerun)
.color(Color::Muted)
.size(IconSize::Small)
@ -1059,10 +1069,17 @@ impl PickerDelegate for FileFinderDelegate {
let (file_name, file_name_positions, full_path, full_path_positions) =
self.labels_for_match(path_match, cx, ix);
let file_icon = if settings.file_icons {
FileIcons::get_icon(Path::new(&file_name), cx).map(Icon::from_path)
} else {
None
};
Some(
ListItem::new(ix)
.spacing(ListItemSpacing::Sparse)
.end_slot::<AnyElement>(Some(icon))
.start_slot::<Icon>(file_icon)
.end_slot::<AnyElement>(history_icon)
.inset(true)
.selected(selected)
.child(

View File

@ -0,0 +1,27 @@
use anyhow::Result;
use schemars::JsonSchema;
use serde_derive::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
#[derive(Deserialize, Debug, Clone, Copy, PartialEq)]
pub struct FileFinderSettings {
pub file_icons: bool,
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
pub struct FileFinderSettingsContent {
/// Whether to show file icons in the file finder.
///
/// Default: true
pub file_icons: Option<bool>,
}
impl Settings for FileFinderSettings {
const KEY: Option<&'static str> = Some("file_finder");
type FileContent = FileFinderSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut gpui::AppContext) -> Result<Self> {
sources.json_merge()
}
}
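// A hedged sketch (not part of this commit) of how this would look in a user's
// settings.json, given the "file_finder" key registered above:
//
//     "file_finder": {
//         "file_icons": false
//     }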

View File

@ -1,7 +1,7 @@
use rope::Rope;
use std::{iter, ops::Range};
use sum_tree::SumTree;
use text::{Anchor, BufferId, BufferSnapshot, OffsetRangeExt, Point};
use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point};
pub use git2 as libgit;
use libgit::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch};
@ -13,29 +13,30 @@ pub enum DiffHunkStatus {
Removed,
}
/// A diff hunk, representing a range of consecutive lines in a singleton buffer, associated with a generic range.
/// A diff hunk resolved to rows in the buffer.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct DiffHunk<T> {
/// E.g. a range in a multibuffer that contains an excerpt of the singleton buffer this diff hunk belongs to.
/// Consider a singleton buffer with 10 lines, all of them modified, so the corresponding diff hunk has the range 0..10,
/// and a multibuffer with an excerpt of lines 2..6 from that singleton buffer.
/// If the multibuffer is searched for diff hunks, the associated range would be in multibuffer rows, corresponding to rows 2..6 of the singleton buffer.
/// But the hunk range would still be 0..10, the same as for any other excerpt of the same singleton buffer.
pub associated_range: Range<T>,
/// Singleton buffer ID this hunk belongs to.
pub buffer_id: BufferId,
/// A consecutive range of lines in the singleton buffer that were changed and produced this diff hunk.
pub struct DiffHunk {
/// The buffer range, expressed in terms of rows.
pub row_range: Range<u32>,
/// The range in the buffer to which this hunk corresponds.
pub buffer_range: Range<Anchor>,
/// The original singleton buffer text before the change, i.e. what previously occupied the place of `buffer_range`.
/// The range in the buffer's diff base text to which this hunk corresponds.
pub diff_base_byte_range: Range<usize>,
}
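// Illustrative sketch (not part of this commit): for a buffer whose first two
// lines were edited, a hunk could look like
//
//     DiffHunk {
//         row_range: 0..2,                  // rows in the current buffer text
//         buffer_range: start..end,         // anchors around those rows
//         diff_base_byte_range: 0..13,      // byte span in the old (diff base) text
//     }
//
// where `start`/`end` stand in for anchors from `BufferSnapshot::anchor_before`
// and the byte span is a hypothetical value.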
impl sum_tree::Item for DiffHunk<Anchor> {
/// We store [`InternalDiffHunk`]s internally so we don't need to store the additional row range.
#[derive(Debug, Clone)]
struct InternalDiffHunk {
buffer_range: Range<Anchor>,
diff_base_byte_range: Range<usize>,
}
impl sum_tree::Item for InternalDiffHunk {
type Summary = DiffHunkSummary;
fn summary(&self) -> Self::Summary {
DiffHunkSummary {
buffer_range: self.associated_range.clone(),
buffer_range: self.buffer_range.clone(),
}
}
}
@ -66,7 +67,7 @@ impl sum_tree::Summary for DiffHunkSummary {
#[derive(Debug, Clone)]
pub struct BufferDiff {
last_buffer_version: Option<clock::Global>,
tree: SumTree<DiffHunk<Anchor>>,
tree: SumTree<InternalDiffHunk>,
}
impl BufferDiff {
@ -81,11 +82,12 @@ impl BufferDiff {
self.tree.is_empty()
}
#[cfg(any(test, feature = "test-support"))]
pub fn hunks_in_row_range<'a>(
&'a self,
range: Range<u32>,
buffer: &'a BufferSnapshot,
) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
) -> impl 'a + Iterator<Item = DiffHunk> {
let start = buffer.anchor_before(Point::new(range.start, 0));
let end = buffer.anchor_after(Point::new(range.end, 0));
@ -96,7 +98,7 @@ impl BufferDiff {
&'a self,
range: Range<Anchor>,
buffer: &'a BufferSnapshot,
) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
) -> impl 'a + Iterator<Item = DiffHunk> {
let mut cursor = self
.tree
.filter::<_, DiffHunkSummary>(buffer, move |summary| {
@ -111,11 +113,8 @@ impl BufferDiff {
})
.flat_map(move |hunk| {
[
(
&hunk.associated_range.start,
hunk.diff_base_byte_range.start,
),
(&hunk.associated_range.end, hunk.diff_base_byte_range.end),
(&hunk.buffer_range.start, hunk.diff_base_byte_range.start),
(&hunk.buffer_range.end, hunk.diff_base_byte_range.end),
]
.into_iter()
});
@ -131,10 +130,9 @@ impl BufferDiff {
}
Some(DiffHunk {
associated_range: start_point.row..end_point.row,
row_range: start_point.row..end_point.row,
diff_base_byte_range: start_base..end_base,
buffer_range: buffer.anchor_before(start_point)..buffer.anchor_after(end_point),
buffer_id: buffer.remote_id(),
})
})
}
@ -143,7 +141,7 @@ impl BufferDiff {
&'a self,
range: Range<Anchor>,
buffer: &'a BufferSnapshot,
) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
) -> impl 'a + Iterator<Item = DiffHunk> {
let mut cursor = self
.tree
.filter::<_, DiffHunkSummary>(buffer, move |summary| {
@ -156,7 +154,7 @@ impl BufferDiff {
cursor.prev(buffer);
let hunk = cursor.item()?;
let range = hunk.associated_range.to_point(buffer);
let range = hunk.buffer_range.to_point(buffer);
let end_row = if range.end.column > 0 {
range.end.row + 1
} else {
@ -164,10 +162,9 @@ impl BufferDiff {
};
Some(DiffHunk {
associated_range: range.start.row..end_row,
row_range: range.start.row..end_row,
diff_base_byte_range: hunk.diff_base_byte_range.clone(),
buffer_range: hunk.buffer_range.clone(),
buffer_id: hunk.buffer_id,
})
})
}
@ -198,7 +195,7 @@ impl BufferDiff {
}
#[cfg(test)]
fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator<Item = DiffHunk> {
let start = text.anchor_before(Point::new(0, 0));
let end = text.anchor_after(Point::new(u32::MAX, u32::MAX));
self.hunks_intersecting_range(start..end, text)
@ -231,7 +228,7 @@ impl BufferDiff {
hunk_index: usize,
buffer: &text::BufferSnapshot,
buffer_row_divergence: &mut i64,
) -> DiffHunk<Anchor> {
) -> InternalDiffHunk {
let line_item_count = patch.num_lines_in_hunk(hunk_index).unwrap();
assert!(line_item_count > 0);
@ -286,11 +283,9 @@ impl BufferDiff {
let start = Point::new(buffer_row_range.start, 0);
let end = Point::new(buffer_row_range.end, 0);
let buffer_range = buffer.anchor_before(start)..buffer.anchor_before(end);
DiffHunk {
associated_range: buffer_range.clone(),
InternalDiffHunk {
buffer_range,
diff_base_byte_range,
buffer_id: buffer.remote_id(),
}
}
}
@ -304,17 +299,16 @@ pub fn assert_hunks<Iter>(
diff_base: &str,
expected_hunks: &[(Range<u32>, &str, &str)],
) where
Iter: Iterator<Item = DiffHunk<u32>>,
Iter: Iterator<Item = DiffHunk>,
{
let actual_hunks = diff_hunks
.map(|hunk| {
(
hunk.associated_range.clone(),
hunk.row_range.clone(),
&diff_base[hunk.diff_base_byte_range],
buffer
.text_for_range(
Point::new(hunk.associated_range.start, 0)
..Point::new(hunk.associated_range.end, 0),
Point::new(hunk.row_range.start, 0)..Point::new(hunk.row_range.end, 0),
)
.collect::<String>(),
)

View File

@ -18,7 +18,6 @@ futures.workspace = true
git.workspace = true
gpui.workspace = true
http_client.workspace = true
isahc.workspace = true
regex.workspace = true
serde.workspace = true
serde_json.workspace = true

View File

@ -3,9 +3,7 @@ use std::sync::Arc;
use anyhow::{bail, Context, Result};
use async_trait::async_trait;
use futures::AsyncReadExt;
use http_client::HttpClient;
use isahc::config::Configurable;
use isahc::{AsyncBody, Request};
use http_client::{AsyncBody, HttpClient, Request};
use serde::Deserialize;
use url::Url;
@ -51,16 +49,14 @@ impl Codeberg {
let url =
format!("https://codeberg.org/api/v1/repos/{repo_owner}/{repo}/git/commits/{commit}");
let mut request = Request::get(&url)
.redirect_policy(isahc::config::RedirectPolicy::Follow)
.header("Content-Type", "application/json");
let mut request = Request::get(&url).header("Content-Type", "application/json");
if let Ok(codeberg_token) = std::env::var("CODEBERG_TOKEN") {
request = request.header("Authorization", format!("Bearer {}", codeberg_token));
}
let mut response = client
.send(request.body(AsyncBody::default())?)
.send_with_redirect_policy(request.body(AsyncBody::default())?, true)
.await
.with_context(|| format!("error fetching Codeberg commit details at {:?}", url))?;

View File

@ -3,9 +3,7 @@ use std::sync::{Arc, OnceLock};
use anyhow::{bail, Context, Result};
use async_trait::async_trait;
use futures::AsyncReadExt;
use http_client::HttpClient;
use isahc::config::Configurable;
use isahc::{AsyncBody, Request};
use http_client::{AsyncBody, HttpClient, Request};
use regex::Regex;
use serde::Deserialize;
use url::Url;
@ -55,16 +53,14 @@ impl Github {
) -> Result<Option<User>> {
let url = format!("https://api.github.com/repos/{repo_owner}/{repo}/commits/{commit}");
let mut request = Request::get(&url)
.redirect_policy(isahc::config::RedirectPolicy::Follow)
.header("Content-Type", "application/json");
let mut request = Request::get(&url).header("Content-Type", "application/json");
if let Ok(github_token) = std::env::var("GITHUB_TOKEN") {
request = request.header("Authorization", format!("Bearer {}", github_token));
}
let mut response = client
.send(request.body(AsyncBody::default())?)
.send_with_redirect_policy(request.body(AsyncBody::default())?, true)
.await
.with_context(|| format!("error fetching GitHub commit details at {:?}", url))?;
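The Codeberg and GitHub providers migrate identically, so the pattern is worth distilling. A sketch under the signatures visible in this diff (send_with_redirect_policy takes the request plus a bool that enables following redirects; fetch_json is a hypothetical helper):

use anyhow::Context as _;
use http_client::{AsyncBody, HttpClient, Request};

async fn fetch_json(
    client: &dyn HttpClient,
    url: &str,
) -> anyhow::Result<http_client::Response<AsyncBody>> {
    // Redirect handling moves from isahc's per-request builder config to a
    // boolean on the http_client trait, removing the direct isahc dependency.
    let request = Request::get(url).header("Content-Type", "application/json");
    client
        .send_with_redirect_policy(request.body(AsyncBody::default())?, true)
        .await
        .with_context(|| format!("error fetching commit details at {url:?}"))
}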

View File

@ -11,13 +11,13 @@ license = "Apache-2.0"
workspace = true
[features]
default = []
default = ["http_client"]
test-support = [
"backtrace",
"collections/test-support",
"rand",
"util/test-support",
"http_client/test-support",
"http_client?/test-support",
]
runtime_shaders = []
macos-blade = ["blade-graphics", "blade-macros", "blade-util", "bytemuck"]
@ -40,7 +40,7 @@ derive_more.workspace = true
etagere = "0.2"
futures.workspace = true
gpui_macros.workspace = true
http_client.workspace = true
http_client = { optional = true, workspace = true }
image = "0.25.1"
itertools.workspace = true
linkme = "0.3"

View File

@ -131,6 +131,7 @@ fn main() {
PathBuf::from_str("crates/gpui/examples/image/app-icon.png").unwrap(),
),
remote_resource: "https://picsum.photos/512/512".into(),
asset_resource: "image/color.svg".into(),
})
})

View File

@ -117,7 +117,7 @@ impl App {
Self(AppContext::new(
current_platform(false),
Arc::new(()),
http_client::client(None, None),
Arc::new(NullHttpClient),
))
}
@ -128,7 +128,7 @@ impl App {
Self(AppContext::new(
current_platform(true),
Arc::new(()),
http_client::client(None, None),
Arc::new(NullHttpClient),
))
}
@ -142,6 +142,14 @@ impl App {
self
}
/// Set the HTTP client for the application.
pub fn with_http_client(self, http_client: Arc<dyn HttpClient>) -> Self {
let mut context_lock = self.0.borrow_mut();
context_lock.http_client = http_client;
drop(context_lock);
self
}
/// Start the application. The provided callback will be called once the
/// app is fully launched.
pub fn run<F>(self, on_finish_launching: F)
@ -1512,3 +1520,22 @@ pub struct KeystrokeEvent {
/// The action that was resolved for the keystroke, if any
pub action: Option<Box<dyn Action>>,
}
struct NullHttpClient;
impl HttpClient for NullHttpClient {
fn send_with_redirect_policy(
&self,
_req: http_client::Request<http_client::AsyncBody>,
_follow_redirects: bool,
) -> futures::future::BoxFuture<
'static,
Result<http_client::Response<http_client::AsyncBody>, anyhow::Error>,
> {
async move { Err(anyhow!("No HttpClient available")) }.boxed()
}
fn proxy(&self) -> Option<&http_client::Uri> {
None
}
}
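Since App construction now defaults to this stub, an embedder that needs networking is expected to wire in a concrete client explicitly. A hypothetical sketch of that wiring (the concrete client is whatever Arc<dyn HttpClient> the binary provides):

use std::sync::Arc;
use gpui::{http_client::HttpClient, App};

// `client` is any concrete HttpClient implementation supplied by the binary.
fn run_app(client: Arc<dyn HttpClient>) {
    App::new()
        .with_http_client(client) // replaces the NullHttpClient default
        .run(|_cx| {
            // HTTP-backed features such as remote image loading can now succeed.
        });
}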

View File

@ -345,7 +345,10 @@ impl Asset for ImageAsset {
let bytes = match source.clone() {
UriOrPath::Path(uri) => fs::read(uri.as_ref())?,
UriOrPath::Uri(uri) => {
let mut response = client.get(uri.as_ref(), ().into(), true).await?;
let mut response = client
.get(uri.as_ref(), ().into(), true)
.await
.map_err(|e| ImageCacheError::Client(Arc::new(e)))?;
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
if !response.status().is_success() {
@ -429,7 +432,7 @@ impl Asset for ImageAsset {
pub enum ImageCacheError {
/// An error that occurred while fetching an image from a remote source.
#[error("http error: {0}")]
Client(#[from] http_client::Error),
Client(#[from] Arc<anyhow::Error>),
/// An error that occurred while reading the image from disk.
#[error("IO error: {0}")]
Io(Arc<std::io::Error>),
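As implied by this hunk, the error enum now wraps its payloads in Arc. A sketch of the full shape (derives assumed; the Arc wrapper is plausibly there because anyhow::Error is not Clone, while cached asset results must be cloneable to hand to multiple waiters):

use std::sync::Arc;
use thiserror::Error;

#[derive(Debug, Clone, Error)]
pub enum ImageCacheError {
    /// An error that occurred while fetching an image from a remote source.
    #[error("http error: {0}")]
    Client(#[from] Arc<anyhow::Error>),
    /// An error that occurred while reading the image from disk.
    #[error("IO error: {0}")]
    Io(Arc<std::io::Error>),
}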

View File

@ -407,7 +407,11 @@ impl BackgroundExecutor {
/// How many CPUs are available to the dispatcher.
pub fn num_cpus(&self) -> usize {
num_cpus::get()
#[cfg(any(test, feature = "test-support"))]
return 4;
#[cfg(not(any(test, feature = "test-support")))]
return num_cpus::get();
}
/// Whether we're on the main thread.

View File

@ -128,6 +128,7 @@ pub use executor::*;
pub use geometry::*;
pub use global::*;
pub use gpui_macros::{register_action, test, IntoElement, Render};
pub use http_client;
pub use input::*;
pub use interactive::*;
use key_dispatch::*;

View File

@ -1063,7 +1063,7 @@ impl IDWriteTextRenderer_Impl for TextRenderer_Impl {
// This `cast()` should never fail, since we are running on Win10+ and
// `IDWriteFontFace3` requires Win10
let font_face = &font_face.cast::<IDWriteFontFace3>().unwrap();
let Some((font_identifier, font_struct, is_emoji)) =
let Some((font_identifier, font_struct, color_font)) =
get_font_identifier_and_font_struct(font_face, &self.locale)
else {
return Ok(());
@ -1084,6 +1084,8 @@ impl IDWriteTextRenderer_Impl for TextRenderer_Impl {
context
.index_converter
.advance_to_utf16_ix(context.utf16_index);
let is_emoji = color_font
&& is_color_glyph(font_face, id, &context.text_system.components.factory);
glyphs.push(ShapedGlyph {
id,
position: point(px(context.width), px(0.0)),
@ -1446,6 +1448,44 @@ fn get_render_target_property(
}
}
// One would think the newer DirectWrite method IDWriteFontFace4::GetGlyphImageFormats
// could be used here, but it doesn't seem to work for some glyphs, say ❤
fn is_color_glyph(
font_face: &IDWriteFontFace3,
glyph_id: GlyphId,
factory: &IDWriteFactory5,
) -> bool {
let glyph_run = DWRITE_GLYPH_RUN {
fontFace: unsafe { std::mem::transmute_copy(font_face) },
fontEmSize: 14.0,
glyphCount: 1,
glyphIndices: &(glyph_id.0 as u16),
glyphAdvances: &0.0,
glyphOffsets: &DWRITE_GLYPH_OFFSET {
advanceOffset: 0.0,
ascenderOffset: 0.0,
},
isSideways: BOOL(0),
bidiLevel: 0,
};
unsafe {
factory.TranslateColorGlyphRun(
D2D_POINT_2F::default(),
&glyph_run as _,
None,
DWRITE_GLYPH_IMAGE_FORMATS_COLR
| DWRITE_GLYPH_IMAGE_FORMATS_SVG
| DWRITE_GLYPH_IMAGE_FORMATS_PNG
| DWRITE_GLYPH_IMAGE_FORMATS_JPEG
| DWRITE_GLYPH_IMAGE_FORMATS_PREMULTIPLIED_B8G8R8A8,
DWRITE_MEASURING_MODE_NATURAL,
None,
0,
)
}
.is_ok()
}
const DEFAULT_LOCALE_NAME: PCWSTR = windows::core::w!("en-US");
const BRUSH_COLOR: D2D1_COLOR_F = D2D1_COLOR_F {
r: 1.0,

Some files were not shown because too many files have changed in this diff.