diff --git a/.github/actions/check_style/action.yml b/.github/actions/check_style/action.yml index 7f41e710c91358..f8362bc636f938 100644 --- a/.github/actions/check_style/action.yml +++ b/.github/actions/check_style/action.yml @@ -7,9 +7,3 @@ runs: - name: cargo fmt shell: bash -euxo pipefail {0} run: cargo fmt --all -- --check - - - name: Find modified migrations - shell: bash -euxo pipefail {0} - run: | - export SQUAWK_GITHUB_TOKEN=${{ github.token }} - . ./script/squawk diff --git a/.github/actions/run_tests/action.yml b/.github/actions/run_tests/action.yml index 815953398ba5b5..07284e2f5854ac 100644 --- a/.github/actions/run_tests/action.yml +++ b/.github/actions/run_tests/action.yml @@ -10,7 +10,7 @@ runs: cargo install cargo-nextest - name: Install Node - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4 with: node-version: "18" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index bf1667643ed5aa..e107c14470ff29 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -2,14 +2,4 @@ Closes #ISSUE Release Notes: -- Added/Fixed/Improved ... - -Optionally, include screenshots / media showcasing your addition that can be included in the release notes. - -### Or... - -Closes #ISSUE - -Release Notes: - -- N/A +- N/A *or* Added/Fixed/Improved ... diff --git a/.github/workflows/bump_collab_staging.yml b/.github/workflows/bump_collab_staging.yml new file mode 100644 index 00000000000000..d0da0a94053b33 --- /dev/null +++ b/.github/workflows/bump_collab_staging.yml @@ -0,0 +1,23 @@ +name: Bump collab-staging Tag + +on: + schedule: + # Fire every day at 16:00 UTC (At the start of the US workday) + - cron: "0 16 * * *" + +jobs: + update-collab-staging-tag: + if: github.repository_owner == 'zed-industries' + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + with: + fetch-depth: 0 + + - name: Update collab-staging tag + run: | + git config user.name github-actions + git config user.email github-actions@github.com + git tag -f collab-staging + git push origin collab-staging --force diff --git a/.github/workflows/bump_patch_version.yml b/.github/workflows/bump_patch_version.yml index e2789a7da77349..dbd0b2b3e1f6e1 100644 --- a/.github/workflows/bump_patch_version.yml +++ b/.github/workflows/bump_patch_version.yml @@ -15,11 +15,10 @@ concurrency: jobs: bump_patch_version: runs-on: - - self-hosted - - test + - buildjet-16vcpu-ubuntu-2204 steps: - name: Checkout code - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: ref: ${{ github.event.inputs.branch }} ssh-key: ${{ secrets.ZED_BOT_DEPLOY_KEY }} @@ -42,7 +41,7 @@ jobs: exit 1 ;; esac - which cargo-set-version > /dev/null || cargo install cargo-edit --features vendored-openssl + which cargo-set-version > /dev/null || cargo install cargo-edit output=$(cargo set-version -p zed --bump patch 2>&1 | sed 's/.* //') git commit -am "Bump to $output for @$GITHUB_ACTOR" --author "Zed Bot " git tag v${output}${tag_suffix} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 37e80e5a8d32ce..a569a612c0d1c7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,9 +7,14 @@ on: - "v[0-9]+.[0-9]+.x" tags: - "v*" + paths-ignore: + - "docs/**" pull_request: branches: - "**" + paths-ignore: + - 
"docs/**" + - ".github/workflows/community_*" concurrency: # Allow only one workflow per any non-`main` branch. @@ -22,45 +27,28 @@ env: RUST_BACKTRACE: 1 jobs: - style: + migration_checks: + name: Check Postgres and Protobuf migrations, mergability + if: github.repository_owner == 'zed-industries' timeout-minutes: 60 - name: Check formatting and spelling runs-on: - self-hosted - test steps: - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false - fetch-depth: 0 + fetch-depth: 0 # fetch full history - name: Remove untracked files run: git clean -df - - name: Check spelling + - name: Find modified migrations + shell: bash -euxo pipefail {0} run: | - if ! cargo install --list | grep "typos-cli v$TYPOS_CLI_VERSION" > /dev/null; then - echo "Installing typos-cli@$TYPOS_CLI_VERSION..." - cargo install "typos-cli@$TYPOS_CLI_VERSION" - else - echo "typos-cli@$TYPOS_CLI_VERSION is already installed." - fi - typos - env: - TYPOS_CLI_VERSION: "1.23.3" - - - name: Run style checks - uses: ./.github/actions/check_style - - - name: Check unused dependencies - uses: bnjbvr/cargo-machete@main - - - name: Check licenses are present - run: script/check-licenses - - - name: Check license generation - run: script/generate-licenses /tmp/zed_licenses_output + export SQUAWK_GITHUB_TOKEN=${{ github.token }} + . ./script/squawk - name: Ensure fresh merge shell: bash -euxo pipefail {0} @@ -82,6 +70,24 @@ jobs: input: "crates/proto/proto/" against: "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/" + style: + timeout-minutes: 60 + name: Check formatting and spelling + if: github.repository_owner == 'zed-industries' + runs-on: + - buildjet-8vcpu-ubuntu-2204 + steps: + - name: Checkout repo + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + + - name: Run style checks + uses: ./.github/actions/check_style + + - name: Check for typos + uses: crate-ci/typos@v1.24.6 + with: + config: ./typos.toml + macos_tests: timeout-minutes: 60 name: (macOS) Run Clippy and tests @@ -90,37 +96,56 @@ jobs: - test steps: - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false - name: cargo clippy run: ./script/clippy + - name: Check unused dependencies + uses: bnjbvr/cargo-machete@main + + - name: Check licenses + run: | + script/check-licenses + script/generate-licenses /tmp/zed_licenses_output + - name: Run tests uses: ./.github/actions/run_tests - name: Build collab - run: cargo build -p collab + run: RUSTFLAGS="-D warnings" cargo build -p collab - name: Build other binaries and features - run: cargo build --workspace --bins --all-features; cargo check -p gpui --features "macos-blade" + run: | + RUSTFLAGS="-D warnings" cargo build --workspace --bins --all-features + cargo check -p gpui --features "macos-blade" + RUSTFLAGS="-D warnings" cargo build -p remote_server linux_tests: timeout-minutes: 60 name: (Linux) Run Clippy and tests runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2204 steps: - name: Add Rust to the PATH run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false + - name: Cache dependencies + uses: 
swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + cache-provider: "buildjet" + + - name: Install Linux dependencies + run: ./script/linux + - name: cargo clippy run: ./script/clippy @@ -128,7 +153,33 @@ jobs: uses: ./.github/actions/run_tests - name: Build Zed - run: cargo build -p zed + run: RUSTFLAGS="-D warnings" cargo build -p zed + + build_remote_server: + timeout-minutes: 60 + name: (Linux) Build Remote Server + runs-on: + - buildjet-16vcpu-ubuntu-2204 + steps: + - name: Add Rust to the PATH + run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH + + - name: Checkout repo + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + with: + clean: false + + - name: Cache dependencies + uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + cache-provider: "buildjet" + + - name: Install Clang & Mold + run: ./script/remote-server && ./script/install-mold 2.34.0 + + - name: Build Remote Server + run: RUSTFLAGS="-D warnings" cargo build -p remote_server # todo(windows): Actually run the tests windows_tests: @@ -137,7 +188,7 @@ jobs: runs-on: hosted-windows-1 steps: - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false @@ -145,13 +196,14 @@ jobs: uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2 with: save-if: ${{ github.ref == 'refs/heads/main' }} + cache-provider: "github" - name: cargo clippy # Windows can't run shell scripts, so we need to use `cargo xtask`. run: cargo xtask clippy - name: Build Zed - run: cargo build -p zed + run: $env:RUSTFLAGS="-D warnings"; cargo build bundle-mac: timeout-minutes: 60 @@ -172,12 +224,12 @@ jobs: DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} steps: - name: Install Node - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4 with: node-version: "18" - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: # We need to fetch more than one commit so that `script/draft-release-notes` # is able to diff between the current and previous tag. @@ -192,29 +244,12 @@ jobs: - name: Determine version and release channel if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | - set -eu - - version=$(script/get-crate-version zed) - channel=$(cat crates/zed/RELEASE_CHANNEL) - echo "Publishing version: ${version} on release channel ${channel}" - echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV - - expected_tag_name="" - case ${channel} in - stable) - expected_tag_name="v${version}";; - preview) - expected_tag_name="v${version}-pre";; - nightly) - expected_tag_name="v${version}-nightly";; - *) - echo "can't publish a release on channel ${channel}" - exit 1;; - esac - if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then - echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}" - exit 1 - fi + # This exports RELEASE_CHANNEL into env (GITHUB_ENV) + script/determine-release-channel + + - name: Draft release notes + if: ${{ startsWith(github.ref, 'refs/tags/v') }} + run: | mkdir -p target/ # Ignore any errors that occur while drafting release notes to not fail the build. 
script/draft-release-notes "$version" "$channel" > target/release-notes.md || true @@ -232,20 +267,20 @@ jobs: mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg - name: Upload app bundle (universal) to workflow run if main branch or specific label - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} with: name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg path: target/release/Zed.dmg - name: Upload app bundle (aarch64) to workflow run if main branch or specific label - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} with: name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg - name: Upload app bundle (x86_64) to workflow run if main branch or specific label - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} with: name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg @@ -271,57 +306,32 @@ jobs: timeout-minutes: 60 name: Create a Linux bundle runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2004 if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} needs: [linux_tests] env: ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} steps: - - name: Add Rust to the PATH - run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH - - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false - - name: Limit target directory size - run: script/clear-target-dir-if-larger-than 100 + - name: Install Linux dependencies + run: ./script/linux && ./script/install-mold 2.34.0 - name: Determine version and release channel if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | - set -eu - - version=$(script/get-crate-version zed) - channel=$(cat crates/zed/RELEASE_CHANNEL) - echo "Publishing version: ${version} on release channel ${channel}" - echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV - - expected_tag_name="" - case ${channel} in - stable) - expected_tag_name="v${version}";; - preview) - expected_tag_name="v${version}-pre";; - nightly) - expected_tag_name="v${version}-nightly";; - *) - echo "can't publish a release on channel ${channel}" - exit 1;; - esac - if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then - echo "invalid release tag ${GITHUB_REF_NAME}. 
expected ${expected_tag_name}" - exit 1 - fi + # This exports RELEASE_CHANNEL into env (GITHUB_ENV) + script/determine-release-channel - name: Create Linux .tar.gz bundle run: script/bundle-linux - name: Upload Linux bundle to workflow run if main branch or specific label - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} with: name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz @@ -339,11 +349,11 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - bundle-linux-aarch64: + bundle-linux-aarch64: # this runs on ubuntu22.04 timeout-minutes: 60 name: Create arm64 Linux bundle runs-on: - - hosted-linux-arm-1 + - buildjet-16vcpu-ubuntu-2204-arm if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} needs: [linux_tests] env: @@ -351,62 +361,24 @@ jobs: ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} steps: - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false - - name: "Setup jq" - uses: dcarbone/install-jq-action@8867ddb4788346d7c22b72ea2e2ffe4d514c7bcb # v2 - - - name: Set up Clang - run: | - sudo apt-get update - sudo apt-get install -y llvm-10 clang-10 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev - echo "/usr/lib/llvm-10/bin" >> $GITHUB_PATH - - - uses: rui314/setup-mold@0bf4f07ef9048ec62a45f9dbf2f098afa49695f0 # v1 - with: - mold-version: 2.32.0 - - name: rustup - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y - echo "$HOME/.cargo/bin" >> $GITHUB_PATH - - - name: Limit target directory size - run: script/clear-target-dir-if-larger-than 100 + - name: Install Linux dependencies + run: ./script/linux - name: Determine version and release channel if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | - set -eu - - version=$(script/get-crate-version zed) - channel=$(cat crates/zed/RELEASE_CHANNEL) - echo "Publishing version: ${version} on release channel ${channel}" - echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV - - expected_tag_name="" - case ${channel} in - stable) - expected_tag_name="v${version}";; - preview) - expected_tag_name="v${version}-pre";; - nightly) - expected_tag_name="v${version}-nightly";; - *) - echo "can't publish a release on channel ${channel}" - exit 1;; - esac - if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then - echo "invalid release tag ${GITHUB_REF_NAME}. 
expected ${expected_tag_name}" - exit 1 - fi + # This exports RELEASE_CHANNEL into env (GITHUB_ENV) + script/determine-release-channel - name: Create and upload Linux .tar.gz bundle run: script/bundle-linux - name: Upload Linux bundle to workflow run if main branch or specific label - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} with: name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz diff --git a/.github/workflows/community_close_stale_issues.yml b/.github/workflows/community_close_stale_issues.yml new file mode 100644 index 00000000000000..8689c0db0bedc4 --- /dev/null +++ b/.github/workflows/community_close_stale_issues.yml @@ -0,0 +1,31 @@ +name: "Close Stale Issues" +on: + schedule: + - cron: "0 11 * * 2" + workflow_dispatch: + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + stale-issue-message: > + Hi there! 👋 + + We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. Are you able to reproduce this issue in the latest version of Zed? If so, please let us know by commenting on this issue and we will keep it open; otherwise, we'll close it in 7 days. Feel free to open a new issue if you're seeing this message after the issue has been closed. + + Thanks for your help! + close-issue-message: "This issue was closed due to inactivity. If you're still experiencing this problem, please open a new issue with a link to this issue." + # We will increase `days-before-stale` to 365 on or after Jan 24th, + # 2024. This date marks one year since migrating issues from + # 'community' to 'zed' repository. The migration added activity to all + # issues, preventing 365 days from working until then. 
+ days-before-stale: 180 + days-before-close: 7 + any-of-issue-labels: "defect,panic / crash" + operations-per-run: 1000 + ascending: true + enable-statistics: true + stale-issue-label: "stale" diff --git a/.github/workflows/delete_comments.yml b/.github/workflows/community_delete_comments.yml similarity index 100% rename from .github/workflows/delete_comments.yml rename to .github/workflows/community_delete_comments.yml diff --git a/.github/workflows/release_actions.yml b/.github/workflows/community_release_actions.yml similarity index 97% rename from .github/workflows/release_actions.yml rename to .github/workflows/community_release_actions.yml index fed1d728721e89..48b33c5ec6dc9c 100644 --- a/.github/workflows/release_actions.yml +++ b/.github/workflows/community_release_actions.yml @@ -1,3 +1,5 @@ +name: Release Actions + on: release: types: [published] diff --git a/.github/workflows/community_update_all_top_ranking_issues.yml b/.github/workflows/community_update_all_top_ranking_issues.yml new file mode 100644 index 00000000000000..ecc7355511ccdc --- /dev/null +++ b/.github/workflows/community_update_all_top_ranking_issues.yml @@ -0,0 +1,25 @@ +name: Update All Top Ranking Issues + +on: + schedule: + - cron: "0 */12 * * *" + workflow_dispatch: + +jobs: + update_top_ranking_issues: + runs-on: ubuntu-latest + if: github.repository_owner == 'zed-industries' + steps: + - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + - name: Set up uv + uses: astral-sh/setup-uv@f3bcaebff5eace81a1c062af9f9011aae482ca9d # v3 + with: + version: "latest" + enable-cache: true + cache-dependency-glob: "script/update_top_ranking_issues/pyproject.toml" + - name: Install Python 3.13 + run: uv python install 3.13 + - name: Install dependencies + run: uv sync --project script/update_top_ranking_issues -p 3.13 + - name: Run script + run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 5393 diff --git a/.github/workflows/community_update_weekly_top_ranking_issues.yml b/.github/workflows/community_update_weekly_top_ranking_issues.yml new file mode 100644 index 00000000000000..aab45740cdf7a8 --- /dev/null +++ b/.github/workflows/community_update_weekly_top_ranking_issues.yml @@ -0,0 +1,25 @@ +name: Update Weekly Top Ranking Issues + +on: + schedule: + - cron: "0 15 * * *" + workflow_dispatch: + +jobs: + update_top_ranking_issues: + runs-on: ubuntu-latest + if: github.repository_owner == 'zed-industries' + steps: + - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + - name: Set up uv + uses: astral-sh/setup-uv@f3bcaebff5eace81a1c062af9f9011aae482ca9d # v3 + with: + version: "latest" + enable-cache: true + cache-dependency-glob: "script/update_top_ranking_issues/pyproject.toml" + - name: Install Python 3.13 + run: uv python install 3.13 + - name: Install dependencies + run: uv sync --project script/update_top_ranking_issues -p 3.13 + - name: Run script + run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 6952 --query-day-interval 7 diff --git a/.github/workflows/danger.yml b/.github/workflows/danger.yml index 8ff35b9e26da59..2828cb42eab51f 100644 --- a/.github/workflows/danger.yml +++ b/.github/workflows/danger.yml @@ -14,14 +14,14 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + - uses: 
actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0 with: version: 9 - name: Setup Node - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4 with: node-version: "20" cache: "pnpm" diff --git a/.github/workflows/deploy_cloudflare.yml b/.github/workflows/deploy_cloudflare.yml index 5cf4d6fd13140e..1c9e7bc5b08bba 100644 --- a/.github/workflows/deploy_cloudflare.yml +++ b/.github/workflows/deploy_cloudflare.yml @@ -8,11 +8,12 @@ on: jobs: deploy-docs: name: Deploy Docs + if: github.repository_owner == 'zed-industries' runs-on: ubuntu-latest steps: - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false @@ -36,28 +37,28 @@ jobs: mdbook build ./docs --dest-dir=../target/deploy/docs/ - name: Deploy Docs - uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3 + uses: cloudflare/wrangler-action@9681c2997648301493e78cacbfb790a9f19c833f # v3 with: apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} command: pages deploy target/deploy --project-name=docs - name: Deploy Install - uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3 + uses: cloudflare/wrangler-action@9681c2997648301493e78cacbfb790a9f19c833f # v3 with: apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} command: r2 object put -f script/install.sh zed-open-source-website-assets/install.sh - name: Deploy Docs Workers - uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3 + uses: cloudflare/wrangler-action@9681c2997648301493e78cacbfb790a9f19c833f # v3 with: apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} command: deploy .cloudflare/docs-proxy/src/worker.js - name: Deploy Install Workers - uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3 + uses: cloudflare/wrangler-action@9681c2997648301493e78cacbfb790a9f19c833f # v3 with: apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index 4555698ecd5690..0eaf1697df2f62 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -8,7 +8,6 @@ on: env: DOCKER_BUILDKIT: 1 - DIGITALOCEAN_ACCESS_TOKEN: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }} jobs: style: @@ -18,7 +17,7 @@ jobs: - test steps: - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false fetch-depth: 0 @@ -37,7 +36,7 @@ jobs: needs: style steps: - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false fetch-depth: 0 @@ -61,25 +60,27 @@ jobs: - style - tests runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2204 steps: - - name: Add Rust to the PATH - run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH + - name: Install doctl + uses: digitalocean/action-doctl@v2 + with: + token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }} - name: Sign into DigitalOcean docker registry run: doctl registry login - name: 
Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false - - name: Set up default .cargo/config.toml - run: cp ./.cargo/collab-config.toml ./.cargo/config.toml - - name: Build docker image - run: docker build . --build-arg GITHUB_SHA=$GITHUB_SHA --tag registry.digitalocean.com/zed/collab:$GITHUB_SHA + run: | + docker build -f Dockerfile-collab \ + --build-arg GITHUB_SHA=$GITHUB_SHA \ + --tag registry.digitalocean.com/zed/collab:$GITHUB_SHA \ + . - name: Publish docker image run: docker push registry.digitalocean.com/zed/collab:${GITHUB_SHA} @@ -92,10 +93,19 @@ jobs: needs: - publish runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2204 steps: + - name: Checkout repo + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + with: + clean: false + + - name: Install doctl + uses: digitalocean/action-doctl@v2 + with: + token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }} + - name: Sign into Kubernetes run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 ${{ secrets.CLUSTER_NAME }} diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index be0f3c5a823524..437e7f96a678f4 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -11,14 +11,27 @@ on: jobs: check_formatting: name: "Check formatting" + if: github.repository_owner == 'zed-industries' runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0 with: version: 9 - - run: pnpm dlx prettier . --check + - name: Prettier Check on /docs working-directory: ./docs + run: | + pnpm dlx prettier . --check || { + echo "To fix, run from the root of the zed repo:" + echo " cd docs && pnpm dlx prettier . --write && cd .." 
+ false + } + + - name: Check for Typos with Typos-CLI + uses: crate-ci/typos@v1.24.6 + with: + config: ./typos.toml + files: ./docs/ diff --git a/.github/workflows/publish_extension_cli.yml b/.github/workflows/publish_extension_cli.yml index 698a09ad007f88..e222a1cdaa738b 100644 --- a/.github/workflows/publish_extension_cli.yml +++ b/.github/workflows/publish_extension_cli.yml @@ -16,7 +16,7 @@ jobs: - ubuntu-latest steps: - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false @@ -24,6 +24,7 @@ jobs: uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2 with: save-if: ${{ github.ref == 'refs/heads/main' }} + cache-provider: "github" - name: Configure linux shell: bash -euxo pipefail {0} diff --git a/.github/workflows/randomized_tests.yml b/.github/workflows/randomized_tests.yml index 8b628fe5a2df18..af604e6abbfd7c 100644 --- a/.github/workflows/randomized_tests.yml +++ b/.github/workflows/randomized_tests.yml @@ -19,16 +19,15 @@ jobs: tests: name: Run randomized tests runs-on: - - self-hosted - - randomized-tests + - buildjet-16vcpu-ubuntu-2204 steps: - name: Install Node - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4 with: node-version: "18" - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index e1fc5a39f2eff8..9d5581ebdcf220 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -23,7 +23,7 @@ jobs: - test steps: - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false fetch-depth: 0 @@ -44,7 +44,7 @@ jobs: needs: style steps: - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false @@ -70,12 +70,12 @@ jobs: ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} steps: - name: Install Node - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4 with: node-version: "18" - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false @@ -100,8 +100,7 @@ jobs: name: Create a Linux *.tar.gz bundle for x86 if: github.repository_owner == 'zed-industries' runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2004 needs: tests env: DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} @@ -110,13 +109,19 @@ jobs: ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} steps: - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false - name: Add Rust to the PATH run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH + - name: Install Linux dependencies + run: ./script/linux && ./script/install-mold 2.34.0 + + 
- name: Limit target directory size + run: script/clear-target-dir-if-larger-than 100 + - name: Set release channel to nightly run: | set -euo pipefail @@ -144,27 +149,12 @@ jobs: ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} steps: - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 with: clean: false - - name: "Setup jq" - uses: dcarbone/install-jq-action@8867ddb4788346d7c22b72ea2e2ffe4d514c7bcb # v2 - - - name: Set up Clang - run: | - sudo apt-get update - sudo apt-get install -y llvm-10 clang-10 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev - echo "/usr/lib/llvm-10/bin" >> $GITHUB_PATH - - - uses: rui314/setup-mold@0bf4f07ef9048ec62a45f9dbf2f098afa49695f0 # v1 - with: - mold-version: 2.32.0 - - - name: rustup - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y - echo "$HOME/.cargo/bin" >> $GITHUB_PATH + - name: Install Linux dependencies + run: ./script/linux - name: Limit target directory size run: script/clear-target-dir-if-larger-than 100 @@ -181,3 +171,28 @@ jobs: - name: Upload Zed Nightly run: script/upload-nightly linux-targz + + update-nightly-tag: + name: Update nightly tag + if: github.repository_owner == 'zed-industries' + runs-on: ubuntu-latest + needs: + - bundle-mac + - bundle-linux-x86 + - bundle-linux-arm + steps: + - name: Checkout repo + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + with: + fetch-depth: 0 + + - name: Update nightly tag + run: | + if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then + echo "Nightly tag already points to current commit. Skipping tagging." 
+ exit 0 + fi + git config user.name github-actions + git config user.email github-actions@github.com + git tag -f nightly + git push origin nightly --force diff --git a/.github/workflows/update_all_top_ranking_issues.yml b/.github/workflows/update_all_top_ranking_issues.yml deleted file mode 100644 index 6a5ccbcc05d6d3..00000000000000 --- a/.github/workflows/update_all_top_ranking_issues.yml +++ /dev/null @@ -1,18 +0,0 @@ -on: - schedule: - - cron: "0 */12 * * *" - workflow_dispatch: - -jobs: - update_top_ranking_issues: - runs-on: ubuntu-latest - if: github.repository_owner == 'zed-industries' - steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 - - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5 - with: - python-version: "3.11" - architecture: "x64" - cache: "pip" - - run: pip install -r script/update_top_ranking_issues/requirements.txt - - run: python script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 5393 diff --git a/.github/workflows/update_weekly_top_ranking_issues.yml b/.github/workflows/update_weekly_top_ranking_issues.yml deleted file mode 100644 index 40aa3a7ff812c7..00000000000000 --- a/.github/workflows/update_weekly_top_ranking_issues.yml +++ /dev/null @@ -1,18 +0,0 @@ -on: - schedule: - - cron: "0 15 * * *" - workflow_dispatch: - -jobs: - update_top_ranking_issues: - runs-on: ubuntu-latest - if: github.repository_owner == 'zed-industries' - steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 - - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5 - with: - python-version: "3.11" - architecture: "x64" - cache: "pip" - - run: pip install -r script/update_top_ranking_issues/requirements.txt - - run: python script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 6952 --query-day-interval 7 diff --git a/.gitignore b/.gitignore index 634b73ac943cc3..d19c5a102aac8a 100644 --- a/.gitignore +++ b/.gitignore @@ -10,7 +10,7 @@ /crates/collab/seed.json /crates/zed/resources/flatpak/flatpak-cargo-sources.json /dev.zed.Zed*.json -/assets/*licenses.md +/assets/*licenses.* **/venv .build *.wasm diff --git a/.zed/settings.json b/.zed/settings.json index 176fd33a9b966d..41adfdbf591d36 100644 --- a/.zed/settings.json +++ b/.zed/settings.json @@ -38,6 +38,10 @@ } } }, + "file_types": { + "Dockerfile": ["Dockerfile*[!dockerignore]"], + "Git Ignore": ["dockerignore"] + }, "hard_tabs": false, "formatter": "auto", "remove_trailing_whitespace_on_save": true, diff --git a/Cargo.lock b/Cargo.lock index 30661345d6909d..b9b75adba9eb1a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -21,11 +21,11 @@ dependencies = [ [[package]] name = "addr2line" -version = "0.22.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375" dependencies = [ - "gimli 0.29.0", + "gimli 0.31.0", ] [[package]] @@ -245,13 +245,11 @@ dependencies = [ "chrono", "futures 0.3.30", "http_client", - "isahc", "schemars", "serde", "serde_json", "strum 0.25.0", "thiserror", - "tokio", "util", ] @@ -263,9 +261,9 @@ checksum = "34cd60c5e3152cef0a592f1b296f1cc93715d89d2551d85315828c3a09575ff4" [[package]] name = "anyhow" -version = "1.0.86" +version = "1.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" [[package]] name = "approx" @@ -304,6 +302,9 @@ name = "arrayvec" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +dependencies = [ + "serde", +] [[package]] name = "as-raw-xcb-connection" @@ -339,9 +340,9 @@ dependencies = [ [[package]] name = "ashpd" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfe7e0dd0ac5a401dc116ed9f9119cf9decc625600474cb41f0fc0a0050abc9a" +checksum = "4d43c03d9e36dd40cab48435be0b09646da362c278223ca535493877b2c1dee9" dependencies = [ "async-fs 2.1.2", "async-net 2.0.0", @@ -393,13 +394,15 @@ dependencies = [ "gpui", "handlebars 4.5.0", "heed", - "html_to_markdown 0.1.0", + "html_to_markdown", "http_client", "indexed_docs", "indoc", "language", "language_model", + "languages", "log", + "lsp", "markdown", "menu", "multi_buffer", @@ -409,12 +412,14 @@ dependencies = [ "parking_lot", "paths", "picker", + "pretty_assertions", "project", "proto", "rand 0.8.5", "regex", "release_channel", "rope", + "rpc", "schemars", "search", "semantic_index", @@ -432,6 +437,7 @@ dependencies = [ "text", "theme", "toml 0.8.19", + "tree-sitter-md", "ui", "unindent", "util", @@ -451,6 +457,7 @@ dependencies = [ "language", "parking_lot", "serde", + "serde_json", "workspace", ] @@ -528,9 +535,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fec134f64e2bc57411226dfc4e52dec859ddfc7e711fc5e07b612584f000e4aa" +checksum = "7e614738943d3f68c628ae3dbce7c3daffb196665f82f8c8ea6b65de73c79429" dependencies = [ "deflate64", "flate2", @@ -832,15 +839,14 @@ dependencies = [ [[package]] name = "async-stripe" -version = "0.39.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58d670cf4d47a1b8ffef54286a5625382e360a34ee76902fd93ad8c7032a0c30" +version = "0.40.0" +source = "git+https://github.com/zed-industries/async-stripe?rev=3672dd4efb7181aa597bf580bf5a2f5d23db6735#3672dd4efb7181aa597bf580bf5a2f5d23db6735" dependencies = [ "chrono", "futures-util", "http-types", - "hyper", - "hyper-rustls", + "hyper 0.14.30", + "hyper-rustls 0.24.2", "serde", "serde_json", "serde_path_to_error", @@ -871,11 +877,25 @@ version = "4.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" +[[package]] +name = "async-tls" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfeefd0ca297cbbb3bd34fd6b228401c2a5177038257afd751bc29f0a2da4795" +dependencies = [ + "futures-core", + "futures-io", + "rustls 0.20.9", + "rustls-pemfile 1.0.4", + "webpki", + "webpki-roots 0.22.6", +] + [[package]] name = "async-trait" -version = "0.1.81" +version = "0.1.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", @@ -884,17 +904,17 @@ dependencies = [ [[package]] name = "async-tungstenite" -version = "0.23.0" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a1e9efbe14612da0a19fb983059a0b621e9cf6225d7018ecab4f9988215540dc" +checksum = "3609af4bbf701ddaf1f6bb4e6257dff4ff8932327d0e685d3f653724c258b1ac" dependencies = [ - "async-native-tls", "async-std", + "async-tls", "futures-io", "futures-util", "log", "pin-project-lite", - "tungstenite 0.20.1", + "tungstenite 0.21.0", ] [[package]] @@ -925,7 +945,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a860072022177f903e59730004fb5dc13db9275b79bb2aef7ba8ce831956c233" dependencies = [ - "bytes 1.7.1", + "bytes 1.7.2", "futures-sink", "futures-util", "memchr", @@ -976,7 +996,6 @@ dependencies = [ "editor", "gpui", "http_client", - "isahc", "log", "markdown_preview", "menu", @@ -1040,11 +1059,11 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", - "bytes 1.7.1", + "bytes 1.7.2", "fastrand 2.1.1", "hex", "http 0.2.12", - "ring", + "ring 0.17.8", "time", "tokio", "tracing", @@ -1079,7 +1098,7 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", - "bytes 1.7.1", + "bytes 1.7.2", "fastrand 2.1.1", "http 0.2.12", "http-body 0.4.6", @@ -1110,7 +1129,7 @@ dependencies = [ "aws-smithy-types", "aws-smithy-xml", "aws-types", - "bytes 1.7.1", + "bytes 1.7.2", "fastrand 2.1.1", "hex", "hmac", @@ -1140,7 +1159,7 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", - "bytes 1.7.1", + "bytes 1.7.2", "http 0.2.12", "once_cell", "regex-lite", @@ -1162,7 +1181,7 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", - "bytes 1.7.1", + "bytes 1.7.2", "http 0.2.12", "once_cell", "regex-lite", @@ -1203,7 +1222,7 @@ dependencies = [ "aws-smithy-http", "aws-smithy-runtime-api", "aws-smithy-types", - "bytes 1.7.1", + "bytes 1.7.2", "crypto-bigint 0.5.5", "form_urlencoded", "hex", @@ -1213,7 +1232,7 @@ dependencies = [ "once_cell", "p256", "percent-encoding", - "ring", + "ring 0.17.8", "sha2", "subtle", "time", @@ -1240,7 +1259,7 @@ checksum = "598b1689d001c4d4dc3cb386adb07d37786783aee3ac4b324bcadac116bf3d23" dependencies = [ "aws-smithy-http", "aws-smithy-types", - "bytes 1.7.1", + "bytes 1.7.2", "crc32c", "crc32fast", "hex", @@ -1260,7 +1279,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6363078f927f612b970edf9d1903ef5cef9a64d1e8423525ebb1f0a1633c858" dependencies = [ "aws-smithy-types", - "bytes 1.7.1", + "bytes 1.7.2", "crc32fast", ] @@ -1273,7 +1292,7 @@ dependencies = [ "aws-smithy-eventstream", "aws-smithy-runtime-api", "aws-smithy-types", - "bytes 1.7.1", + "bytes 1.7.2", "bytes-utils", "futures-core", "http 0.2.12", @@ -1314,19 +1333,19 @@ dependencies = [ "aws-smithy-http", "aws-smithy-runtime-api", "aws-smithy-types", - "bytes 1.7.1", + "bytes 1.7.2", "fastrand 2.1.1", - "h2", + "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", "http-body 1.0.1", "httparse", - "hyper", - "hyper-rustls", + "hyper 0.14.30", + "hyper-rustls 0.24.2", "once_cell", "pin-project-lite", "pin-utils", - "rustls", + "rustls 0.21.12", "tokio", "tracing", ] @@ -1339,7 +1358,7 @@ checksum = "e086682a53d3aa241192aa110fa8dfce98f2f5ac2ead0de84d41582c7e8fdb96" dependencies = [ "aws-smithy-async", "aws-smithy-types", - "bytes 1.7.1", + "bytes 1.7.2", "http 0.2.12", "http 1.1.0", "pin-project-lite", @@ -1355,7 +1374,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "273dcdfd762fae3e1650b8024624e7cd50e484e37abdab73a7a706188ad34543" dependencies = [ "base64-simd", - "bytes 1.7.1", + "bytes 1.7.2", "bytes-utils", "futures-core", "http 
0.2.12", @@ -1407,12 +1426,12 @@ dependencies = [ "axum-core", "base64 0.21.7", "bitflags 1.3.2", - "bytes 1.7.1", + "bytes 1.7.2", "futures-util", "headers", "http 0.2.12", "http-body 0.4.6", - "hyper", + "hyper 0.14.30", "itoa", "matchit", "memchr", @@ -1425,7 +1444,7 @@ dependencies = [ "serde_path_to_error", "serde_urlencoded", "sha1", - "sync_wrapper", + "sync_wrapper 0.1.2", "tokio", "tokio-tungstenite 0.20.1", "tower", @@ -1440,7 +1459,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" dependencies = [ "async-trait", - "bytes 1.7.1", + "bytes 1.7.2", "futures-util", "http 0.2.12", "http-body 0.4.6", @@ -1457,7 +1476,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9a320103719de37b7b4da4c8eb629d4573f6bcfd3dfe80d3208806895ccf81d" dependencies = [ "axum", - "bytes 1.7.1", + "bytes 1.7.2", "futures-util", "http 0.2.12", "mime", @@ -1473,17 +1492,17 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.73" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", - "miniz_oxide 0.7.4", - "object 0.36.4", + "miniz_oxide 0.8.0", + "object", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] @@ -1564,7 +1583,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "rustc-hash", + "rustc-hash 1.1.0", "shlex", "syn 2.0.76", ] @@ -1584,7 +1603,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "rustc-hash", + "rustc-hash 1.1.0", "shlex", "syn 2.0.76", ] @@ -1709,6 +1728,19 @@ dependencies = [ "profiling", ] +[[package]] +name = "blake3" +version = "1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d82033247fd8e890df8f740e407ad4d038debb9eb1f40533fffb32e7d17dc6f7" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if", + "constant_time_eq", +] + [[package]] name = "block" version = "0.1.6" @@ -1878,9 +1910,9 @@ dependencies = [ [[package]] name = "bytes" -version = "1.7.1" +version = "1.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50" +checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" [[package]] name = "bytes-utils" @@ -1888,7 +1920,7 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7dafe3a8757b027e2be6e4e5601ed563c55989fcf1546e933c66c8eb3a058d35" dependencies = [ - "bytes 1.7.1", + "bytes 1.7.2", "either", ] @@ -2053,9 +2085,9 @@ dependencies = [ [[package]] name = "cargo_toml" -version = "0.20.4" +version = "0.20.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad639525b1c67b6a298f378417b060fbc04618bea559482a8484381cce27d965" +checksum = "88da5a13c620b4ca0078845707ea9c3faf11edbc3ffd8497d11d686211cd1ac0" dependencies = [ "serde", "toml 0.8.19", @@ -2067,12 +2099,6 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" -[[package]] -name = "castaway" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" - [[package]] name = "cbc" version 
= "0.1.2" @@ -2249,9 +2275,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.16" +version = "4.5.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019" +checksum = "b97f376d85a664d5837dbae44bf546e6477a679ff6610010f17276f686d867e8" dependencies = [ "clap_builder", "clap_derive", @@ -2259,9 +2285,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.15" +version = "4.5.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6" +checksum = "19bc80abd44e4bed93ca373a0704ccbd1b710dc5749406201bb018272808dc54" dependencies = [ "anstream", "anstyle", @@ -2281,9 +2307,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.13" +version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0" +checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" dependencies = [ "heck 0.5.0", "proc-macro2", @@ -2326,11 +2352,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0875e527e299fc5f4faba42870bf199a39ab0bb2dbba1b8aef0a2151451130f" dependencies = [ "bstr", - "bytes 1.7.1", + "bytes 1.7.2", "clickhouse-derive", "clickhouse-rs-cityhash-sys", "futures 0.3.30", - "hyper", + "hyper 0.14.30", "hyper-tls", "lz4", "sealed", @@ -2375,7 +2401,6 @@ dependencies = [ "cocoa 0.26.0", "collections", "feature_flags", - "fs", "futures 0.3.30", "gpui", "http_client", @@ -2387,6 +2412,8 @@ dependencies = [ "rand 0.8.5", "release_channel", "rpc", + "rustls 0.20.9", + "rustls-native-certs 0.8.0", "schemars", "serde", "serde_json", @@ -2519,6 +2546,7 @@ dependencies = [ "collections", "ctor", "dashmap 6.0.1", + "derive_more", "dev_server_projects", "editor", "env_logger", @@ -2533,7 +2561,7 @@ dependencies = [ "headless", "hex", "http_client", - "hyper", + "hyper 0.14.30", "indoc", "jsonwebtoken", "language", @@ -2558,7 +2586,8 @@ dependencies = [ "release_channel", "remote", "remote_server", - "reqwest", + "reqwest 0.11.27", + "reqwest_client", "rpc", "rustc-demangle", "scrypt", @@ -2642,7 +2671,7 @@ dependencies = [ name = "collections" version = "0.1.0" dependencies = [ - "rustc-hash", + "rustc-hash 1.1.0", ] [[package]] @@ -2663,7 +2692,7 @@ version = "4.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" dependencies = [ - "bytes 1.7.1", + "bytes 1.7.2", "memchr", ] @@ -2752,12 +2781,19 @@ dependencies = [ "tiny-keccak", ] +[[package]] +name = "constant_time_eq" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" + [[package]] name = "context_servers" version = "0.1.0" dependencies = [ "anyhow", "collections", + "command_palette_hooks", "futures 0.3.30", "gpui", "log", @@ -2807,7 +2843,6 @@ dependencies = [ "gpui", "http_client", "indoc", - "isahc", "language", "lsp", "menu", @@ -2954,7 +2989,7 @@ dependencies = [ "log", "rangemap", "rayon", - "rustc-hash", + "rustc-hash 1.1.0", "rustybuzz", "self_cell", "swash", @@ -3009,74 +3044,86 @@ dependencies = [ [[package]] name = "cranelift-bforest" -version = "0.108.1" +version = "0.111.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"29daf137addc15da6bab6eae2c4a11e274b1d270bf2759508e62f6145e863ef6" +checksum = "32d69b774780246008783a75edfb943eccc2487b6a43808503a07cd563f2ffde" dependencies = [ "cranelift-entity", ] +[[package]] +name = "cranelift-bitset" +version = "0.111.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7d8d71c6b32c1a7cff254c5e5d7359872c1e5e610fbe963472afcddbd9cf303" +dependencies = [ + "serde", + "serde_derive", +] + [[package]] name = "cranelift-codegen" -version = "0.108.1" +version = "0.111.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de619867d5de4c644b7fd9904d6e3295269c93d8a71013df796ab338681222d4" +checksum = "3ad3a906f2a3f3590ad9798d59a46959a8593258eb985af722f634723c063a2c" dependencies = [ "bumpalo", "cranelift-bforest", + "cranelift-bitset", "cranelift-codegen-meta", "cranelift-codegen-shared", "cranelift-control", "cranelift-entity", "cranelift-isle", - "gimli 0.28.1", + "gimli 0.29.0", "hashbrown 0.14.5", "log", "regalloc2", - "rustc-hash", + "rustc-hash 1.1.0", "smallvec", "target-lexicon", ] [[package]] name = "cranelift-codegen-meta" -version = "0.108.1" +version = "0.111.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29f5cf277490037d8dae9513d35e0ee8134670ae4a964a5ed5b198d4249d7c10" +checksum = "cd5e4ee12262a135efbef3ced4ab2153adafe4adc55f36af94f9d73be0f7505d" dependencies = [ "cranelift-codegen-shared", ] [[package]] name = "cranelift-codegen-shared" -version = "0.108.1" +version = "0.111.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c3e22ecad1123343a3c09ac6ecc532bb5c184b6fcb7888df0ea953727f79924" +checksum = "5b9374a2a5f060f72e3080fe1c87c9ff4bef2cbe798faae60daf276fb1a13968" [[package]] name = "cranelift-control" -version = "0.108.1" +version = "0.111.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53ca3ec6d30bce84ccf59c81fead4d16381a3ef0ef75e8403bc1e7385980da09" +checksum = "fba3ca2f344bb22d265a928e7c3f5f46e1a2eb41f1393bd53538d07b6ffb5293" dependencies = [ "arbitrary", ] [[package]] name = "cranelift-entity" -version = "0.108.1" +version = "0.111.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eabb8d36b0ca8906bec93c78ea516741cac2d7e6b266fa7b0ffddcc09004990" +checksum = "a6aef77dfb018eed09d92d4244abe3c1c060cbbd900c24f75ddde7d75d0e781e" dependencies = [ + "cranelift-bitset", "serde", "serde_derive", ] [[package]] name = "cranelift-frontend" -version = "0.108.1" +version = "0.111.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44b42630229e49a8cfcae90bdc43c8c4c08f7a7aa4618b67f79265cd2f996dd2" +checksum = "7b1d6954f03d63df1cb95d66153c97df0201862220861349bbd5f583754b1917" dependencies = [ "cranelift-codegen", "log", @@ -3086,15 +3133,15 @@ dependencies = [ [[package]] name = "cranelift-isle" -version = "0.108.1" +version = "0.111.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "918d1e36361805dfe0b6cdfd5a5ffdb5d03fa796170c5717d2727cbe623b93a0" +checksum = "f8b9b7e088b784796ea8aa5947c1cc12034c1b076a077ec2a5a287da717fa746" [[package]] name = "cranelift-native" -version = "0.108.1" +version = "0.111.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75aea85a0d7e1800b14ce9d3f53adf8ad4d1ee8a9e23b0269bdc50285e93b9b3" +checksum = "4cab7424083d070669ff3fdeea7c5b4b5013a055aa1ee0532703f17a5f62af64" dependencies = [ "cranelift-codegen", "libc", @@ -3103,9 +3150,9 @@ dependencies = [ [[package]] name = 
"cranelift-wasm" -version = "0.108.1" +version = "0.111.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dac491fd3473944781f0cf9528c90cc899d18ad438da21961a839a3a44d57dfb" +checksum = "81a9f6d0495984eef1d753ec8748de0b216b37ade16d219f1c0f27d8188d7f77" dependencies = [ "cranelift-codegen", "cranelift-entity", @@ -3113,7 +3160,7 @@ dependencies = [ "itertools 0.12.1", "log", "smallvec", - "wasmparser 0.207.0", + "wasmparser 0.215.0", "wasmtime-types", ] @@ -3288,36 +3335,6 @@ dependencies = [ "windows-sys 0.59.0", ] -[[package]] -name = "curl" -version = "0.4.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e2161dd6eba090ff1594084e95fd67aeccf04382ffea77999ea94ed42ec67b6" -dependencies = [ - "curl-sys", - "libc", - "openssl-probe", - "openssl-sys", - "schannel", - "socket2 0.5.7", - "windows-sys 0.52.0", -] - -[[package]] -name = "curl-sys" -version = "0.4.74+curl-8.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8af10b986114528fcdc4b63b6f5f021b7057618411046a4de2ba0f0149a097bf" -dependencies = [ - "cc", - "libc", - "libz-sys", - "openssl-sys", - "pkg-config", - "vcpkg", - "windows-sys 0.52.0", -] - [[package]] name = "cursor-icon" version = "1.1.0" @@ -3676,6 +3693,7 @@ dependencies = [ "multi_buffer", "ordered-float 2.10.1", "parking_lot", + "pretty_assertions", "project", "rand 0.8.5", "release_channel", @@ -3684,6 +3702,7 @@ dependencies = [ "serde", "serde_json", "settings", + "similar", "smallvec", "smol", "snippet", @@ -3784,9 +3803,9 @@ checksum = "edd0f118536f44f5ccd48bcb8b111bdc3de888b58c74639dfb034a357d0f206d" [[package]] name = "emojis" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e72f23d65b46527e461b161ab9a126c378aa2249d8a8d15718d23ab1fb4d8786" +checksum = "99e1f1df1f181f2539bac8bf027d31ca5ffbf9e559e3f2d09413b9107b5c02f4" dependencies = [ "phf", ] @@ -3962,6 +3981,34 @@ dependencies = [ "num-traits", ] +[[package]] +name = "evals" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap", + "client", + "clock", + "collections", + "env_logger", + "feature_flags", + "fs", + "git", + "gpui", + "http_client", + "language", + "languages", + "node_runtime", + "open_ai", + "project", + "reqwest_client", + "semantic_index", + "serde", + "serde_json", + "settings", + "smol", +] + [[package]] name = "event-listener" version = "2.5.3" @@ -4044,7 +4091,6 @@ dependencies = [ "gpui", "http_client", "indexed_docs", - "isahc", "language", "log", "lsp", @@ -4053,6 +4099,7 @@ dependencies = [ "paths", "project", "release_channel", + "reqwest_client", "schemars", "semantic_version", "serde", @@ -4066,8 +4113,8 @@ dependencies = [ "ui", "url", "util", - "wasm-encoder 0.201.0", - "wasmparser 0.201.0", + "wasm-encoder 0.215.0", + "wasmparser 0.215.0", "wasmtime", "wasmtime-wasi", "wit-component", @@ -4083,9 +4130,9 @@ dependencies = [ "env_logger", "extension", "fs", - "http_client", "language", "log", + "reqwest_client", "rpc", "serde", "serde_json", @@ -4187,6 +4234,7 @@ dependencies = [ name = "feature_flags" version = "0.1.0" dependencies = [ + "futures 0.3.30", "gpui", ] @@ -4203,7 +4251,6 @@ dependencies = [ "gpui", "http_client", "human_bytes", - "isahc", "language", "log", "menu", @@ -4240,6 +4287,7 @@ dependencies = [ "ctor", "editor", "env_logger", + "file_icons", "futures 0.3.30", "fuzzy", "gpui", @@ -4247,7 +4295,9 @@ dependencies = [ "menu", "picker", "project", + "schemars", "serde", + "serde_derive", "serde_json", 
"settings", "text", @@ -4329,7 +4379,7 @@ dependencies = [ "futures-core", "futures-sink", "nanorand", - "spin", + "spin 0.9.8", ] [[package]] @@ -4778,9 +4828,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.1" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" +checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" dependencies = [ "fallible-iterator", "indexmap 2.4.0", @@ -4789,9 +4839,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.29.0" +version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" +checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" [[package]] name = "git" @@ -4844,7 +4894,6 @@ dependencies = [ "git", "gpui", "http_client", - "isahc", "pretty_assertions", "regex", "serde", @@ -4861,9 +4910,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "globset" -version = "0.4.14" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" +checksum = "15f1ce686646e7f1e19bf7d5533fe443a45dbfb990e00629110797578b42fb19" dependencies = [ "aho-corasick", "bstr", @@ -4928,7 +4977,6 @@ dependencies = [ "anyhow", "futures 0.3.30", "http_client", - "isahc", "schemars", "serde", "serde_json", @@ -5084,7 +5132,7 @@ version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" dependencies = [ - "bytes 1.7.1", + "bytes 1.7.2", "fnv", "futures-core", "futures-sink", @@ -5097,6 +5145,25 @@ dependencies = [ "tracing", ] +[[package]] +name = "h2" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" +dependencies = [ + "atomic-waker", + "bytes 1.7.2", + "fnv", + "futures-core", + "futures-sink", + "http 1.1.0", + "indexmap 2.4.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "half" version = "2.4.1" @@ -5161,6 +5228,7 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash 0.8.11", "allocator-api2", + "serde", ] [[package]] @@ -5179,7 +5247,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270" dependencies = [ "base64 0.21.7", - "bytes 1.7.1", + "bytes 1.7.2", "headers-core", "http 0.2.12", "httpdate", @@ -5211,7 +5279,7 @@ dependencies = [ "node_runtime", "postage", "project", - "rpc", + "proto", "settings", "shellexpand 2.1.2", "signal-hook", @@ -5406,25 +5474,13 @@ dependencies = [ "regex", ] -[[package]] -name = "html_to_markdown" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e608e8dd0939bfb6b516d96a5919751b835297a02230aecb88d2fc84ebebaa8a" -dependencies = [ - "anyhow", - "html5ever", - "markup5ever_rcdom", - "regex", -] - [[package]] name = "http" version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ - "bytes 1.7.1", + "bytes 1.7.2", "fnv", "itoa", ] @@ -5435,7 +5491,7 @@ version = "1.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" dependencies = [ - "bytes 1.7.1", + "bytes 1.7.2", "fnv", "itoa", ] @@ -5446,7 +5502,7 @@ version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ - "bytes 1.7.1", + "bytes 1.7.2", "http 0.2.12", "pin-project-lite", ] @@ -5457,7 +5513,7 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ - "bytes 1.7.1", + "bytes 1.7.2", "http 1.1.0", ] @@ -5467,7 +5523,7 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ - "bytes 1.7.1", + "bytes 1.7.2", "futures-util", "http 1.1.0", "http-body 1.0.1", @@ -5506,11 +5562,10 @@ name = "http_client" version = "0.1.0" dependencies = [ "anyhow", + "bytes 1.7.2", "derive_more", "futures 0.3.30", - "futures-lite 1.13.0", "http 1.1.0", - "isahc", "log", "serde", "serde_json", @@ -5547,11 +5602,11 @@ version = "0.14.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" dependencies = [ - "bytes 1.7.1", + "bytes 1.7.2", "futures-channel", "futures-core", "futures-util", - "h2", + "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", "httparse", @@ -5565,6 +5620,26 @@ dependencies = [ "want", ] +[[package]] +name = "hyper" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +dependencies = [ + "bytes 1.7.2", + "futures-channel", + "futures-util", + "h2 0.4.6", + "http 1.1.0", + "http-body 1.0.1", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + [[package]] name = "hyper-rustls" version = "0.24.2" @@ -5573,12 +5648,30 @@ checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http 0.2.12", - "hyper", + "hyper 0.14.30", "log", - "rustls", - "rustls-native-certs", + "rustls 0.21.12", + "rustls-native-certs 0.6.3", + "tokio", + "tokio-rustls 0.24.1", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" +dependencies = [ + "futures-util", + "http 1.1.0", + "hyper 1.4.1", + "hyper-util", + "rustls 0.23.13", + "rustls-native-certs 0.8.0", + "rustls-pki-types", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.0", + "tower-service", ] [[package]] @@ -5587,13 +5680,32 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ - "bytes 1.7.1", - "hyper", + "bytes 1.7.2", + "hyper 0.14.30", "native-tls", "tokio", "tokio-native-tls", ] +[[package]] +name = "hyper-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" +dependencies = [ + "bytes 1.7.2", + "futures-channel", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "hyper 1.4.1", + "pin-project-lite", + "socket2 0.5.7", + "tokio", + 
"tower-service", + "tracing", +] + [[package]] name = "iana-time-zone" version = "0.1.60" @@ -5635,9 +5747,9 @@ dependencies = [ [[package]] name = "ignore" -version = "0.4.22" +version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1" +checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b" dependencies = [ "crossbeam-deque", "globset", @@ -5722,7 +5834,7 @@ dependencies = [ "fuzzy", "gpui", "heed", - "html_to_markdown 0.1.0", + "html_to_markdown", "http_client", "indexmap 1.9.3", "indoc", @@ -5902,9 +6014,9 @@ dependencies = [ [[package]] name = "ipc-channel" -version = "0.18.2" +version = "0.18.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e46231d1db8ea8f874012b1b87efb9e968f763c577220372a9c7caadce1448da" +checksum = "c7f4c80f2df4fc64fb7fc2cff69fc034af26e6e6617ea9f1313131af464b9ca0" dependencies = [ "bincode", "crossbeam-channel", @@ -5916,7 +6028,7 @@ dependencies = [ "serde", "tempfile", "uuid", - "windows 0.48.0", + "windows 0.58.0", ] [[package]] @@ -5961,33 +6073,6 @@ version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" -[[package]] -name = "isahc" -version = "1.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "334e04b4d781f436dc315cb1e7515bd96826426345d498149e4bde36b67f8ee9" -dependencies = [ - "async-channel 1.9.0", - "castaway", - "crossbeam-utils", - "curl", - "curl-sys", - "encoding_rs", - "event-listener 2.5.3", - "futures-lite 1.13.0", - "http 0.2.12", - "log", - "mime", - "once_cell", - "polling 2.8.0", - "slab", - "sluice", - "tracing", - "tracing-futures", - "url", - "waker-fn", -] - [[package]] name = "itertools" version = "0.10.5" @@ -6092,7 +6177,7 @@ dependencies = [ "base64 0.21.7", "js-sys", "pem", - "ring", + "ring 0.17.8", "serde", "serde_json", "simple_asn1", @@ -6238,6 +6323,7 @@ dependencies = [ "strum 0.25.0", "text", "theme", + "thiserror", "tiktoken-rs", "ui", "unindent", @@ -6304,7 +6390,6 @@ dependencies = [ "node_runtime", "paths", "project", - "protols-tree-sitter-proto", "regex", "rope", "rust-embed", @@ -6321,6 +6406,7 @@ dependencies = [ "tree-sitter-c", "tree-sitter-cpp", "tree-sitter-css", + "tree-sitter-diff", "tree-sitter-go", "tree-sitter-gomod", "tree-sitter-gowork", @@ -6343,7 +6429,7 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" dependencies = [ - "spin", + "spin 0.9.8", ] [[package]] @@ -6366,9 +6452,9 @@ checksum = "03087c2bad5e1034e8cace5926dec053fb3790248370865f5117a7d0213354c8" [[package]] name = "libc" -version = "0.2.158" +version = "0.2.159" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" +checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" [[package]] name = "libdbus-sys" @@ -6538,7 +6624,7 @@ dependencies = [ "prost", "prost-build", "prost-types", - "reqwest", + "reqwest 0.12.8", "serde", ] @@ -6987,7 +7073,6 @@ dependencies = [ "ctor", "env_logger", "futures 0.3.30", - "git", "gpui", "itertools 0.13.0", "language", @@ -7023,7 +7108,7 @@ dependencies = [ "hexf-parse", "indexmap 2.4.0", "log", - "rustc-hash", + "rustc-hash 1.1.0", "spirv", "termcolor", "thiserror", @@ -7122,6 +7207,7 @@ dependencies = 
[ "async-std", "async-tar", "async-trait", + "async-watch", "async_zip", "futures 0.3.30", "http_client", @@ -7134,6 +7220,7 @@ dependencies = [ "tempfile", "util", "walkdir", + "which 6.0.3", "windows 0.58.0", ] @@ -7414,9 +7501,9 @@ dependencies = [ [[package]] name = "object" -version = "0.33.0" +version = "0.36.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8dd6c0cdf9429bce006e1362bfce61fa1bfd8c898a643ed8d2b471934701d3d" +checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a" dependencies = [ "crc32fast", "hashbrown 0.14.5", @@ -7424,15 +7511,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "object" -version = "0.36.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a" -dependencies = [ - "memchr", -] - [[package]] name = "oboe" version = "0.6.1" @@ -7463,7 +7541,6 @@ dependencies = [ "anyhow", "futures 0.3.30", "http_client", - "isahc", "schemars", "serde", "serde_json", @@ -7532,7 +7609,6 @@ dependencies = [ "anyhow", "futures 0.3.30", "http_client", - "isahc", "schemars", "serde", "serde_json", @@ -7788,9 +7864,9 @@ dependencies = [ [[package]] name = "parking" -version = "2.2.0" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" @@ -7905,7 +7981,7 @@ name = "perplexity" version = "0.1.0" dependencies = [ "serde", - "zed_extension_api 0.1.0", + "zed_extension_api 0.2.0", ] [[package]] @@ -8298,9 +8374,9 @@ dependencies = [ [[package]] name = "pretty_assertions" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" +checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" dependencies = [ "diff", "yansi", @@ -8351,9 +8427,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.86" +version = "1.0.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +checksum = "b3e4daa0dcf6feba26f985457cdf104d4b4256fc5a09547140f3631bb076b19a" dependencies = [ "unicode-ident", ] @@ -8429,6 +8505,7 @@ dependencies = [ "terminal", "text", "unindent", + "url", "util", "which 6.0.3", "windows 0.58.0", @@ -8508,7 +8585,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "444879275cb4fd84958b1a1d5420d15e6fcf7c235fe47f053c9c2a80aceb6001" dependencies = [ - "bytes 1.7.1", + "bytes 1.7.2", "prost-derive", ] @@ -8518,7 +8595,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62941722fb675d463659e49c4f3fe1fe792ff24fe5bbaa9c08cd3b98a1c354f5" dependencies = [ - "bytes 1.7.1", + "bytes 1.7.2", "heck 0.3.3", "itertools 0.10.5", "lazy_static", @@ -8551,7 +8628,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "534b7a0e836e3c482d2693070f982e39e7611da9695d4d1f5a4b186b51faef0a" dependencies = [ - "bytes 1.7.1", + "bytes 1.7.2", "prost", ] @@ -8561,9 +8638,6 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", - "futures 0.3.30", - "gpui", - "parking_lot", "prost", "prost-build", "serde", @@ -8576,20 +8650,10 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94" [[package]] -name = "protols-tree-sitter-proto" -version = "0.2.0" +name = "psm" +version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bac092da66e21a71eb832925de7b542f8ac34f75fd05cd2fc3e272863e3fd2b" -dependencies = [ - "cc", - "tree-sitter", -] - -[[package]] -name = "psm" -version = "0.1.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5787f7cda34e3033a72192c018bc5883100330f362ef279a8cbccfce8bb4e874" +checksum = "5787f7cda34e3033a72192c018bc5883100330f362ef279a8cbccfce8bb4e874" dependencies = [ "cc", ] @@ -8693,6 +8757,54 @@ dependencies = [ "zed_actions", ] +[[package]] +name = "quinn" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c7c5fdde3cdae7203427dc4f0a68fe0ed09833edc525a03456b153b79828684" +dependencies = [ + "bytes 1.7.2", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash 2.0.0", + "rustls 0.23.13", + "socket2 0.5.7", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "quinn-proto" +version = "0.11.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6" +dependencies = [ + "bytes 1.7.2", + "rand 0.8.5", + "ring 0.17.8", + "rustc-hash 2.0.0", + "rustls 0.23.13", + "slab", + "thiserror", + "tinyvec", + "tracing", +] + +[[package]] +name = "quinn-udp" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fe68c2e9e1a1234e218683dbdf9f9dfcb094113c5ac2b938dfcb9bab4c4140b" +dependencies = [ + "libc", + "once_cell", + "socket2 0.5.7", + "tracing", + "windows-sys 0.59.0", +] + [[package]] name = "quote" version = "1.0.37" @@ -8891,14 +9003,16 @@ dependencies = [ "client", "dev_server_projects", "editor", + "file_finder", "futures 0.3.30", "fuzzy", "gpui", + "itertools 0.13.0", "language", "log", - "markdown", "menu", "ordered-float 2.10.1", + "paths", "picker", "project", "release_channel", @@ -8912,7 +9026,6 @@ dependencies = [ "task", "terminal_view", "ui", - "ui_input", "util", "workspace", ] @@ -8970,7 +9083,7 @@ checksum = "ad156d539c879b7a24a363a2016d77961786e71f48f2e2fc8302a92abd2429a6" dependencies = [ "hashbrown 0.13.2", "log", - "rustc-hash", + "rustc-hash 1.1.0", "slice-group-by", "smallvec", ] @@ -9048,8 +9161,10 @@ dependencies = [ "rpc", "serde", "serde_json", + "shlex", "smol", "tempfile", + "thiserror", "util", ] @@ -9058,26 +9173,37 @@ name = "remote_server" version = "0.1.0" dependencies = [ "anyhow", + "async-watch", + "backtrace", "cargo_toml", + "clap", "client", "clock", "env_logger", "fs", "futures 0.3.30", + "git", + "git_hosting_providers", "gpui", "http_client", "language", + "languages", "log", + "lsp", "node_runtime", + "paths", "project", "remote", + "reqwest_client", "rpc", + "rust-embed", "serde", "serde_json", "settings", "shellexpand 2.1.2", "smol", "toml 0.8.19", + "util", "worktree", ] @@ -9140,14 +9266,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" dependencies = [ "base64 0.21.7", - "bytes 1.7.1", + "bytes 1.7.2", "encoding_rs", "futures-core", "futures-util", - "h2", + "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", - "hyper", + "hyper 0.14.30", "hyper-tls", "ipnet", "js-sys", @@ -9157,12 +9283,12 @@ dependencies = [ 
"once_cell", "percent-encoding", "pin-project-lite", - "rustls-pemfile", + "rustls-pemfile 1.0.4", "serde", "serde_json", "serde_urlencoded", - "sync_wrapper", - "system-configuration", + "sync_wrapper 0.1.2", + "system-configuration 0.5.1", "tokio", "tokio-native-tls", "tower-service", @@ -9173,6 +9299,69 @@ dependencies = [ "winreg 0.50.0", ] +[[package]] +name = "reqwest" +version = "0.12.8" +source = "git+https://github.com/zed-industries/reqwest.git?rev=fd110f6998da16bbca97b6dddda9be7827c50e29#fd110f6998da16bbca97b6dddda9be7827c50e29" +dependencies = [ + "base64 0.22.1", + "bytes 1.7.2", + "encoding_rs", + "futures-core", + "futures-util", + "h2 0.4.6", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.4.1", + "hyper-rustls 0.27.3", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "once_cell", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls 0.23.13", + "rustls-native-certs 0.8.0", + "rustls-pemfile 2.1.3", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 1.0.1", + "system-configuration 0.6.1", + "tokio", + "tokio-rustls 0.26.0", + "tokio-socks", + "tokio-util", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "windows-registry", +] + +[[package]] +name = "reqwest_client" +version = "0.1.0" +dependencies = [ + "anyhow", + "bytes 1.7.2", + "futures 0.3.30", + "gpui", + "http_client", + "log", + "reqwest 0.12.8", + "serde", + "smol", + "tokio", +] + [[package]] name = "resvg" version = "0.41.0" @@ -9221,6 +9410,21 @@ dependencies = [ "util", ] +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + [[package]] name = "ring" version = "0.17.8" @@ -9231,8 +9435,8 @@ dependencies = [ "cfg-if", "getrandom 0.2.15", "libc", - "spin", - "untrusted", + "spin 0.9.8", + "untrusted 0.9.0", "windows-sys 0.52.0", ] @@ -9244,7 +9448,7 @@ checksum = "9008cd6385b9e161d8229e1f6549dd23c3d022f132a2ea37ac3a10ac4935779b" dependencies = [ "bitvec", "bytecheck", - "bytes 1.7.1", + "bytes 1.7.2", "hashbrown 0.12.3", "ptr_meta", "rend", @@ -9288,12 +9492,13 @@ dependencies = [ [[package]] name = "rodio" -version = "0.17.3" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b1bb7b48ee48471f55da122c0044fcc7600cfcc85db88240b89cb832935e611" +checksum = "6006a627c1a38d37f3d3a85c6575418cfe34a5392d60a686d0071e1c8d427acb" dependencies = [ "cpal", "hound", + "thiserror", ] [[package]] @@ -9380,14 +9585,14 @@ dependencies = [ "async-dispatcher", "async-std", "base64 0.22.1", - "bytes 1.7.1", + "bytes 1.7.2", "chrono", "data-encoding", "dirs 5.0.1", "futures 0.3.30", "glob", "rand 0.8.5", - "ring", + "ring 0.17.8", "serde", "serde_json", "shellexpand 3.1.0", @@ -9439,7 +9644,7 @@ checksum = "b082d80e3e3cc52b2ed634388d436fe1f4de6af5786cc2de9ba9737527bdf555" dependencies = [ "arrayvec", "borsh", - "bytes 1.7.1", + "bytes 1.7.2", "num-traits", "rand 0.8.5", "rkyv", @@ -9459,6 +9664,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +[[package]] +name = "rustc-hash" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" + [[package]] name = "rustc_version" version = "0.4.1" @@ -9508,6 +9719,18 @@ dependencies = [ "rustix 0.38.35", ] +[[package]] +name = "rustls" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +dependencies = [ + "log", + "ring 0.16.20", + "sct", + "webpki", +] + [[package]] name = "rustls" version = "0.21.12" @@ -9515,11 +9738,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", - "ring", - "rustls-webpki", + "ring 0.17.8", + "rustls-webpki 0.101.7", "sct", ] +[[package]] +name = "rustls" +version = "0.23.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2dabaac7466917e566adb06783a81ca48944c6898a1b08b9374106dd671f4c8" +dependencies = [ + "once_cell", + "ring 0.17.8", + "rustls-pki-types", + "rustls-webpki 0.102.8", + "subtle", + "zeroize", +] + [[package]] name = "rustls-native-certs" version = "0.6.3" @@ -9527,7 +9764,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" dependencies = [ "openssl-probe", - "rustls-pemfile", + "rustls-pemfile 1.0.4", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcaf18a4f2be7326cd874a5fa579fae794320a0f388d365dca7e480e55f83f8a" +dependencies = [ + "openssl-probe", + "rustls-pemfile 2.1.3", + "rustls-pki-types", "schannel", "security-framework", ] @@ -9541,14 +9791,41 @@ dependencies = [ "base64 0.21.7", ] +[[package]] +name = "rustls-pemfile" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425" +dependencies = [ + "base64 0.22.1", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" + [[package]] name = "rustls-webpki" version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring", - "untrusted", + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "rustls-webpki" +version = "0.102.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +dependencies = [ + "ring 0.17.8", + "rustls-pki-types", + "untrusted 0.9.0", ] [[package]] @@ -9662,8 +9939,8 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring", - "untrusted", + "ring 0.17.8", + "untrusted 0.9.0", ] [[package]] @@ -9846,10 +10123,13 @@ name = "semantic_index" version = "0.1.0" dependencies = [ "anyhow", + "arrayvec", + "blake3", "client", "clock", "collections", "env_logger", + "feature_flags", "fs", "futures 0.3.30", "futures-batch", @@ -9857,11 +10137,13 @@ dependencies = [ "heed", "http_client", "language", + "language_model", "languages", "log", "open_ai", "parking_lot", "project", + 
"reqwest_client", "serde", "serde_json", "settings", @@ -9947,9 +10229,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.127" +version = "1.0.128" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8043c06d9f82bd7271361ed64f415fe5e12a77fdb52e573e7f06a516dea329ad" +checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" dependencies = [ "indexmap 2.4.0", "itoa", @@ -9960,9 +10242,9 @@ dependencies = [ [[package]] name = "serde_json_lenient" -version = "0.1.8" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc61c66b53a4035fcce237ef38043f4b2f0ebf918fd0e69541a5166104065581" +checksum = "a5d0bae483150302560d7cb52e7932f39b69a6fbdd099e48d33ef060a8c9c078" dependencies = [ "indexmap 2.4.0", "itoa", @@ -10286,7 +10568,7 @@ dependencies = [ name = "slash_commands_example" version = "0.1.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] @@ -10304,17 +10586,6 @@ dependencies = [ "version_check", ] -[[package]] -name = "sluice" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d7400c0eff44aa2fcb5e31a5f24ba9716ed90138769e4977a2ba6014ae63eb5" -dependencies = [ - "async-channel 1.9.0", - "futures-core", - "futures-io", -] - [[package]] name = "smallvec" version = "1.13.2" @@ -10379,12 +10650,27 @@ dependencies = [ "futures 0.3.30", "gpui", "parking_lot", + "paths", "serde", "serde_json", "snippet", "util", ] +[[package]] +name = "snippets_ui" +version = "0.1.0" +dependencies = [ + "fuzzy", + "gpui", + "language", + "paths", + "picker", + "ui", + "util", + "workspace", +] + [[package]] name = "socket2" version = "0.4.10" @@ -10414,6 +10700,12 @@ dependencies = [ "smallvec", ] +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + [[package]] name = "spin" version = "0.9.8" @@ -10469,6 +10761,7 @@ dependencies = [ "libsqlite3-sys", "parking_lot", "smol", + "sqlformat", "thread_local", "util", "uuid", @@ -10485,9 +10778,9 @@ dependencies = [ [[package]] name = "sqlformat" -version = "0.2.4" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f895e3734318cc55f1fe66258926c9b910c124d47520339efecbb6c59cec7c1f" +checksum = "7bba3a93db0cc4f7bdece8bb09e77e2e785c20bfebf79eb8340ed80708048790" dependencies = [ "nom", "unicode_categories", @@ -10515,7 +10808,7 @@ dependencies = [ "atoi", "bigdecimal", "byteorder", - "bytes 1.7.1", + "bytes 1.7.2", "chrono", "crc", "crossbeam-queue", @@ -10536,8 +10829,8 @@ dependencies = [ "paste", "percent-encoding", "rust_decimal", - "rustls", - "rustls-pemfile", + "rustls 0.21.12", + "rustls-pemfile 1.0.4", "serde", "serde_json", "sha2", @@ -10550,7 +10843,7 @@ dependencies = [ "tracing", "url", "uuid", - "webpki-roots", + "webpki-roots 0.25.4", ] [[package]] @@ -10603,7 +10896,7 @@ dependencies = [ "bigdecimal", "bitflags 2.6.0", "byteorder", - "bytes 1.7.1", + "bytes 1.7.2", "chrono", "crc", "digest", @@ -10748,6 +11041,7 @@ dependencies = [ "menu", "picker", "project", + "reqwest_client", "rust-embed", "settings", "simplelog", @@ -10880,6 +11174,7 @@ dependencies = [ "text", "theme", "ui", + "unicode-segmentation", "util", "windows 0.58.0", ] @@ -11042,6 +11337,15 @@ version = "0.1.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +[[package]] +name = "sync_wrapper" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +dependencies = [ + "futures-core", +] + [[package]] name = "synchronoise" version = "1.0.1" @@ -11062,17 +11366,16 @@ dependencies = [ [[package]] name = "sysinfo" -version = "0.30.13" +version = "0.31.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a5b4ddaee55fb2bea2bf0e5000747e5f5c0de765e5a5ff87f4cd106439f4bb3" +checksum = "355dbe4f8799b304b05e1b0f05fc59b2a18d36645cf169607da45bde2f69a1be" dependencies = [ - "cfg-if", "core-foundation-sys", "libc", + "memchr", "ntapi", - "once_cell", "rayon", - "windows 0.52.0", + "windows 0.54.0", ] [[package]] @@ -11083,7 +11386,18 @@ checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" dependencies = [ "bitflags 1.3.2", "core-foundation 0.9.4", - "system-configuration-sys", + "system-configuration-sys 0.5.0", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags 2.6.0", + "core-foundation 0.9.4", + "system-configuration-sys 0.6.0", ] [[package]] @@ -11096,6 +11410,16 @@ dependencies = [ "libc", ] +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "system-deps" version = "6.2.2" @@ -11141,6 +11465,7 @@ dependencies = [ "project", "serde", "serde_json", + "settings", "theme", "ui", "util", @@ -11274,6 +11599,7 @@ dependencies = [ "gpui", "libc", "rand 0.8.5", + "regex", "release_channel", "schemars", "serde", @@ -11290,12 +11616,12 @@ dependencies = [ [[package]] name = "terminal_size" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" +checksum = "4f599bd7ca042cfdf8f4512b277c02ba102247820f9d9d4a9f521f496751a6ef" dependencies = [ "rustix 0.38.35", - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] @@ -11374,7 +11700,6 @@ dependencies = [ "serde_json_lenient", "serde_repr", "settings", - "story", "util", "uuid", ] @@ -11421,18 +11746,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" dependencies = [ "proc-macro2", "quote", @@ -11472,7 +11797,7 @@ dependencies = [ "fancy-regex", "lazy_static", "parking_lot", - "rustc-hash", + "rustc-hash 1.1.0", ] [[package]] @@ -11611,6 +11936,7 @@ dependencies = [ "pretty_assertions", 
"project", "recent_projects", + "remote", "rpc", "serde", "settings", @@ -11634,7 +11960,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" dependencies = [ "backtrace", - "bytes 1.7.1", + "bytes 1.7.2", "libc", "mio 1.0.2", "parking_lot", @@ -11683,7 +12009,18 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls", + "rustls 0.21.12", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +dependencies = [ + "rustls 0.23.13", + "rustls-pki-types", "tokio", ] @@ -11697,6 +12034,7 @@ dependencies = [ "futures-io", "futures-util", "thiserror", + "tokio", ] [[package]] @@ -11736,11 +12074,11 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.11" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" dependencies = [ - "bytes 1.7.1", + "bytes 1.7.2", "futures-core", "futures-io", "futures-sink", @@ -11845,7 +12183,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f873044bf02dd1e8239e9c1293ea39dad76dc594ec16185d0a1bf31d8dc8d858" dependencies = [ "bitflags 1.3.2", - "bytes 1.7.1", + "bytes 1.7.2", "futures-core", "futures-util", "http 0.2.12", @@ -11863,7 +12201,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" dependencies = [ "bitflags 2.6.0", - "bytes 1.7.1", + "bytes 1.7.2", "futures-core", "futures-util", "http 0.2.12", @@ -11920,16 +12258,6 @@ dependencies = [ "valuable", ] -[[package]] -name = "tracing-futures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" -dependencies = [ - "pin-project", - "tracing", -] - [[package]] name = "tracing-log" version = "0.2.0" @@ -11974,110 +12302,122 @@ dependencies = [ [[package]] name = "tree-sitter" -version = "0.22.6" -source = "git+https://github.com/tree-sitter/tree-sitter?rev=7f4a57817d58a2f134fe863674acad6bbf007228#7f4a57817d58a2f134fe863674acad6bbf007228" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20f4cd3642c47a85052a887d86704f4eac272969f61b686bdd3f772122aabaff" dependencies = [ "cc", "regex", + "regex-syntax 0.8.4", "tree-sitter-language", "wasmtime-c-api-impl", ] [[package]] name = "tree-sitter-bash" -version = "0.21.0" +version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5244703ad2e08a616d859a0557d7aa290adcd5e0990188a692e628ffe9dce40" +checksum = "3aa5e1c6bd02c0053f3f68edcf5d8866b38a8640584279e30fca88149ce14dda" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] name = "tree-sitter-c" -version = "0.21.4" +version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f956d5351d62652864a4ff3ae861747e7a1940dc96c9998ae400ac0d3ce30427" +checksum = "c8b3fb515e498e258799a31d78e6603767cd6892770d9e2290ec00af5c3ad80b" dependencies = [ "cc", - "tree-sitter", 
+ "tree-sitter-language", ] [[package]] name = "tree-sitter-cpp" -version = "0.22.3" +version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d509a22a992790d38f2c291961ff8a1ff016c437c7ec6befc9220b8eec8918c" +checksum = "1d67e862242878d6ee50e1e5814f267ee3eea0168aea2cdbd700ccfb4c74b6d3" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] name = "tree-sitter-css" -version = "0.21.1" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e08e324b1cf60fd3291774b49724c66de2ce8fcf4d358d0b4b82e37b41b1c9b" +checksum = "8d0018d6b1692a806f9cddaa1e5616951fd58840c39a0b21401b55ab3df12292" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", +] + +[[package]] +name = "tree-sitter-diff" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfe1e5ca280a65dfe5ba4205c1bcc84edf486464fed315db53dee6da9a335889" +dependencies = [ + "cc", + "tree-sitter-language", ] [[package]] name = "tree-sitter-elixir" -version = "0.2.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df94bf7f057768b1cab2ee1f14812ed4ae33f9e04d09254043eeaa797db4ef70" +checksum = "97bf0efa4be41120018f23305b105ad4dfd3be1b7f302dc4071d0e6c2dec3a32" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] name = "tree-sitter-embedded-template" -version = "0.20.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33817ade928c73a32d4f904a602321e09de9fc24b71d106f3b4b3f8ab30dcc38" +checksum = "9644d7586ebe850c84037ee2f4804dda4a9348eef053be6b1e0d7712342a2495" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] name = "tree-sitter-go" -version = "0.21.2" +version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8d702a98d3c7e70e466456e58ff2b1ac550bf1e29b97e5770676d2fdabec00d" +checksum = "caf57626e4c9b6d6efaf8a8d5ee1241c5f178ae7bfdf693713ae6a774f01424e" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] name = "tree-sitter-gomod" version = "1.0.2" -source = "git+https://github.com/camdencheek/tree-sitter-go-mod?rev=1f55029bacd0a6a11f6eb894c4312d429dcf735c#1f55029bacd0a6a11f6eb894c4312d429dcf735c" +source = "git+https://github.com/zed-industries/tree-sitter-go-mod?rev=a9aea5e358cde4d0f8ff20b7bc4fa311e359c7ca#a9aea5e358cde4d0f8ff20b7bc4fa311e359c7ca" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] name = "tree-sitter-gowork" version = "0.0.1" -source = "git+https://github.com/d1y/tree-sitter-go-work?rev=dcbabff454703c3a4bc98a23cf8778d4be46fd22#dcbabff454703c3a4bc98a23cf8778d4be46fd22" +source = "git+https://github.com/zed-industries/tree-sitter-go-work?rev=acb0617bf7f4fda02c6217676cc64acb89536dc7#acb0617bf7f4fda02c6217676cc64acb89536dc7" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] name = "tree-sitter-heex" version = "0.0.1" -source = "git+https://github.com/phoenixframework/tree-sitter-heex?rev=6dd0303acf7138dd2b9b432a229e16539581c701#6dd0303acf7138dd2b9b432a229e16539581c701" +source = "git+https://github.com/zed-industries/tree-sitter-heex?rev=1dd45142fbb05562e35b2040c6129c9bca346592#1dd45142fbb05562e35b2040c6129c9bca346592" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] @@ -12092,96 +12432,96 @@ dependencies = [ [[package]] name = "tree-sitter-jsdoc" -version = "0.21.0" 
+version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d07920101ff12a59574890318a37fa7e18d9c06d9aa4be334aa24adbb480f18" +checksum = "f8c4049eb0ad690e34e5f63640f75ce12a2ff8ba18344d0a13926805b139c0c8" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] name = "tree-sitter-json" -version = "0.21.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b737dcb73c35d74b7d64a5f3dde158113c86a012bf3cee2bfdf2150d23b05db" +checksum = "86a5d6b3ea17e06e7a34aabeadd68f5866c0d0f9359155d432095f8b751865e4" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] name = "tree-sitter-language" version = "0.1.0" -source = "git+https://github.com/tree-sitter/tree-sitter?rev=7f4a57817d58a2f134fe863674acad6bbf007228#7f4a57817d58a2f134fe863674acad6bbf007228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2545046bd1473dac6c626659cc2567c6c0ff302fc8b84a56c4243378276f7f57" [[package]] name = "tree-sitter-md" -version = "0.2.3" -source = "git+https://github.com/zed-industries/tree-sitter-markdown?rev=e3855e37f8f2c71aa7513c18a9c95fb7461b1b10#e3855e37f8f2c71aa7513c18a9c95fb7461b1b10" +version = "0.3.2" +source = "git+https://github.com/zed-industries/tree-sitter-markdown?rev=4cfa6aad6b75052a5077c80fd934757d9267d81b#4cfa6aad6b75052a5077c80fd934757d9267d81b" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] name = "tree-sitter-python" -version = "0.21.0" +version = "0.23.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4066c6cf678f962f8c2c4561f205945c84834cce73d981e71392624fdc390a9" +checksum = "65661b1a3e24139e2e54207e47d910ab07e28790d78efc7d5dc3a11ce2a110eb" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] name = "tree-sitter-regex" -version = "0.21.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ff1286fe9651b2797484839ffa37aa76c8618d4ccb6836d7e31765dfd60c0d5" +checksum = "0b9a7087b1cf769c96b7e74414947df067fb6135f04d176fd23be08b9396cc0e" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] name = "tree-sitter-ruby" -version = "0.21.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0031f687c0772f2dad7b77104c43428611099a1804c81244ada21560f41f0b1" +checksum = "6ec5ee842e27791e0adffa0b2a177614de51d2a26e5c7e84d014ed7f097e5ed0" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] name = "tree-sitter-rust" -version = "0.21.2" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "277690f420bf90741dea984f3da038ace46c4fe6047cba57a66822226cde1c93" +checksum = "cffbbcb780348fbae8395742ae5b34c1fd794e4085d43aac9f259387f9a84dc8" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] name = "tree-sitter-typescript" -version = "0.21.2" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecb35d98a688378e56c18c9c159824fd16f730ccbea19aacf4f206e5d5438ed9" +checksum = "aecf1585ae2a9dddc2b1d4c0e2140b2ec9876e2a25fd79de47fcf7dae0384685" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] name = "tree-sitter-yaml" version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aad27ec46ad343d8b514f64dd3fdffb478c592ece561b6c935d90ef55589c6b6" +source = 
"git+https://github.com/zed-industries/tree-sitter-yaml?rev=baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a#baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" dependencies = [ "cc", - "tree-sitter", + "tree-sitter-language", ] [[package]] @@ -12203,12 +12543,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" dependencies = [ "byteorder", - "bytes 1.7.1", + "bytes 1.7.2", "data-encoding", "http 0.2.12", "httparse", "log", - "native-tls", "rand 0.8.5", "sha1", "thiserror", @@ -12223,7 +12562,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ef1a641ea34f399a848dea702823bbecfb4c486f911735368f1f137cb8257e1" dependencies = [ "byteorder", - "bytes 1.7.1", + "bytes 1.7.2", "data-encoding", "http 1.1.0", "httparse", @@ -12278,6 +12617,7 @@ dependencies = [ "story", "strum 0.25.0", "theme", + "ui_macros", "windows 0.58.0", ] @@ -12292,6 +12632,16 @@ dependencies = [ "ui", ] +[[package]] +name = "ui_macros" +version = "0.1.0" +dependencies = [ + "convert_case 0.6.0", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "unicase" version = "2.7.0" @@ -12382,6 +12732,12 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + [[package]] name = "untrusted" version = "0.9.0" @@ -12686,12 +13042,12 @@ version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4378d202ff965b011c64817db11d5829506d3404edeadb61f190d111da3f231c" dependencies = [ - "bytes 1.7.1", + "bytes 1.7.2", "futures-channel", "futures-util", "headers", "http 0.2.12", - "hyper", + "hyper 0.14.30", "log", "mime", "mime_guess", @@ -12804,9 +13160,9 @@ dependencies = [ [[package]] name = "wasm-encoder" -version = "0.207.0" +version = "0.215.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d996306fb3aeaee0d9157adbe2f670df0236caf19f6728b221e92d0f27b3fe17" +checksum = "4fb56df3e06b8e6b77e37d2969a50ba51281029a9aeb3855e76b7f49b6418847" dependencies = [ "leb128", ] @@ -12827,6 +13183,19 @@ dependencies = [ "wasmparser 0.201.0", ] +[[package]] +name = "wasm-streams" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e072d4e72f700fb3443d8fe94a39315df013eef1104903cdb0a2abd322bbecd" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "wasmparser" version = "0.201.0" @@ -12840,35 +13209,38 @@ dependencies = [ [[package]] name = "wasmparser" -version = "0.207.0" +version = "0.215.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e19bb9f8ab07616da582ef8adb24c54f1424c7ec876720b7da9db8ec0626c92c" +checksum = "53fbde0881f24199b81cf49b6ff8f9c145ac8eb1b7fc439adb5c099734f7d90e" dependencies = [ "ahash 0.8.11", "bitflags 2.6.0", "hashbrown 0.14.5", "indexmap 2.4.0", "semver", + "serde", ] [[package]] name = "wasmprinter" -version = "0.207.0" +version = "0.215.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c2d8a7b4dabb460208e6b4334d9db5766e84505038b2529e69c3d07ac619115" +checksum = "d8e9a325d85053408209b3d2ce5eaddd0dd6864d1cff7a007147ba073157defc" dependencies = [ "anyhow", - 
"wasmparser 0.207.0", + "termcolor", + "wasmparser 0.215.0", ] [[package]] name = "wasmtime" -version = "21.0.1" +version = "24.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f92a1370c66a0022e6d92dcc277e2c84f5dece19569670b8ce7db8162560d8b6" +checksum = "7e4a5b05e9f1797e557e79f0cf04348eaa7a232596939ef4762838ddf7a6127a" dependencies = [ "anyhow", "async-trait", + "bitflags 2.6.0", "bumpalo", "cc", "cfg-if", @@ -12880,8 +13252,7 @@ dependencies = [ "log", "mach2", "memfd", - "memoffset", - "object 0.33.0", + "object", "once_cell", "paste", "postcard", @@ -12893,7 +13264,7 @@ dependencies = [ "smallvec", "sptr", "target-lexicon", - "wasmparser 0.207.0", + "wasmparser 0.215.0", "wasmtime-asm-macros", "wasmtime-component-macro", "wasmtime-component-util", @@ -12909,18 +13280,18 @@ dependencies = [ [[package]] name = "wasmtime-asm-macros" -version = "21.0.1" +version = "24.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dee8679c974a7f258c03d60d3c747c426ed219945b6d08cbc77fd2eab15b2d1" +checksum = "64414227e19556d4372f9688458c5673606de83473eb66cd0514d36ea8808cab" dependencies = [ "cfg-if", ] [[package]] name = "wasmtime-c-api-impl" -version = "21.0.1" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76af8b62c8d2814b7d5975c5dc140122e4c086150db6c15d25a4b76f11c929dd" +checksum = "765e302e7d9125e614aaeec3ad6b6083605393004eca00214106a4ff6b47fc58" dependencies = [ "anyhow", "log", @@ -12932,9 +13303,9 @@ dependencies = [ [[package]] name = "wasmtime-c-api-macros" -version = "21.0.1" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d74b92f917c9ced9c6262a00e9cb982ebac183e6900b4d44e2480f936b9495eb" +checksum = "2d09d02eaa84aa2de5babee7b0296557ad6e4903bb10aa8d135e393e753a43d6" dependencies = [ "proc-macro2", "quote", @@ -12942,9 +13313,9 @@ dependencies = [ [[package]] name = "wasmtime-component-macro" -version = "21.0.1" +version = "24.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32cae30035f1cf97dcc6657c979cf39f99ce6be93583675eddf4aeaa5548509c" +checksum = "d3ead31b73689602225742920adbcd881f5656702c1a3b4830862c0c66731727" dependencies = [ "anyhow", "proc-macro2", @@ -12952,20 +13323,20 @@ dependencies = [ "syn 2.0.76", "wasmtime-component-util", "wasmtime-wit-bindgen", - "wit-parser 0.207.0", + "wit-parser 0.215.0", ] [[package]] name = "wasmtime-component-util" -version = "21.0.1" +version = "24.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7ae611f08cea620c67330925be28a96115bf01f8f393a6cbdf4856a86087134" +checksum = "ab2c778661800e1dcd8ba3e15ff042299709e0a4c512525d9cbb604a04c0421b" [[package]] name = "wasmtime-cranelift" -version = "21.0.1" +version = "24.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2909406a6007e28be964067167890bca4574bd48a9ff18f1fa9f4856d89ea40" +checksum = "9f7ee1f436bcf7d213ef7c2e9d44caffcd57e540ccf997d013384c2ae9b82db7" dependencies = [ "anyhow", "cfg-if", @@ -12975,36 +13346,38 @@ dependencies = [ "cranelift-frontend", "cranelift-native", "cranelift-wasm", - "gimli 0.28.1", + "gimli 0.29.0", "log", - "object 0.33.0", + "object", "target-lexicon", "thiserror", - "wasmparser 0.207.0", + "wasmparser 0.215.0", "wasmtime-environ", "wasmtime-versioned-export-macros", ] [[package]] name = "wasmtime-environ" -version = "21.0.1" +version = "24.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "40e227f9ed2f5421473723d6c0352b5986e6e6044fde5410a274a394d726108f" +checksum = "fa8c33adfb3b9f8d6ef716bc55aea5e6b2275cd5a6721ec8c837d1cb0c471516" dependencies = [ "anyhow", "cpp_demangle", + "cranelift-bitset", "cranelift-entity", - "gimli 0.28.1", + "gimli 0.29.0", "indexmap 2.4.0", "log", - "object 0.33.0", + "object", "postcard", "rustc-demangle", + "semver", "serde", "serde_derive", "target-lexicon", - "wasm-encoder 0.207.0", - "wasmparser 0.207.0", + "wasm-encoder 0.215.0", + "wasmparser 0.215.0", "wasmprinter", "wasmtime-component-util", "wasmtime-types", @@ -13012,9 +13385,9 @@ dependencies = [ [[package]] name = "wasmtime-fiber" -version = "21.0.1" +version = "24.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42edb392586d07038c1638e854382db916b6ca7845a2e6a7f8dc49e08907acdd" +checksum = "9f3227ed807c2dda9dd770c241023fcd6e48e6722c1c26ff79fc3604d412e884" dependencies = [ "anyhow", "cc", @@ -13027,9 +13400,9 @@ dependencies = [ [[package]] name = "wasmtime-jit-icache-coherence" -version = "21.0.1" +version = "24.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afe088f9b56bb353adaf837bf7e10f1c2e1676719dd5be4cac8e37f2ba1ee5bc" +checksum = "fa89fc440f0edca882ba6d1890608898e6f0193afdc504c0a64478ec53622bd6" dependencies = [ "anyhow", "cfg-if", @@ -13039,28 +13412,29 @@ dependencies = [ [[package]] name = "wasmtime-slab" -version = "21.0.1" +version = "24.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ff75cafffe47b04b036385ce3710f209153525b0ed19d57b0cf44a22d446460" +checksum = "682b7a5b6772c4e4de8c696fc619ec97930b5e89098db9bee22c1136e002438b" [[package]] name = "wasmtime-types" -version = "21.0.1" +version = "24.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f2fa462bfea3220711c84e2b549f147e4df89eeb49b8a2a3d89148f6cc4a8b1" +checksum = "4a95ea5572f8c3ffe777af21aa00a92097ded291a342fecad9f2c6a972ecea99" dependencies = [ + "anyhow", "cranelift-entity", "serde", "serde_derive", "smallvec", - "wasmparser 0.207.0", + "wasmparser 0.215.0", ] [[package]] name = "wasmtime-versioned-export-macros" -version = "21.0.1" +version = "24.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4cedc5bfef3db2a85522ee38564b47ef3b7fc7c92e94cacbce99808e63cdd47" +checksum = "ac3621bfccd4e4336ae141d62b96e96316c0f23c47d64e9700594ebe3c4d9a10" dependencies = [ "proc-macro2", "quote", @@ -13069,14 +13443,14 @@ dependencies = [ [[package]] name = "wasmtime-wasi" -version = "21.0.1" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdbbe94245904d4c96c7c5f7b55bad896cc27908644efd9442063c0748b631fc" +checksum = "545ae8298ffce025604f7480f9c7d6948c985bef7ce9aee249ef79307813e83c" dependencies = [ "anyhow", "async-trait", "bitflags 2.6.0", - "bytes 1.7.1", + "bytes 1.7.2", "cap-fs-ext", "cap-net-ext", "cap-rand", @@ -13100,16 +13474,16 @@ dependencies = [ [[package]] name = "wasmtime-winch" -version = "21.0.1" +version = "24.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b27054fed6be4f3800aba5766f7ef435d4220ce290788f021a08d4fa573108" +checksum = "d1d3e99f6bba37864487c9356398667699935b9cfa3655ed2b153b9428b3dd21" dependencies = [ "anyhow", "cranelift-codegen", - "gimli 0.28.1", - "object 0.33.0", + "gimli 0.29.0", + "object", "target-lexicon", - "wasmparser 0.207.0", + "wasmparser 0.215.0", "wasmtime-cranelift", "wasmtime-environ", "winch-codegen", @@ -13117,14 
+13491,14 @@ dependencies = [ [[package]] name = "wasmtime-wit-bindgen" -version = "21.0.1" +version = "24.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c936a52ce69c28de2aa3b5fb4f2dbbb2966df304f04cccb7aca4ba56d915fda0" +checksum = "ee0f4524da226d2cb503d794c8928de6bc24878758cebd4e383c946e9fdb8b3a" dependencies = [ "anyhow", "heck 0.4.1", "indexmap 2.4.0", - "wit-parser 0.207.0", + "wit-parser 0.215.0", ] [[package]] @@ -13231,6 +13605,25 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "webpki" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + [[package]] name = "webpki-roots" version = "0.25.4" @@ -13266,6 +13659,7 @@ dependencies = [ "util", "vim", "workspace", + "zed_actions", ] [[package]] @@ -13304,9 +13698,9 @@ dependencies = [ [[package]] name = "wiggle" -version = "21.0.1" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a89ea6f74ece6d1cfbd089783006b8eb69a0219ca83cad22068f0d9fa9df3f91" +checksum = "cc850ca3c02c5835934d23f28cec4c5a3fb66fe0b4ecd968bbb35609dda5ddc0" dependencies = [ "anyhow", "async-trait", @@ -13319,9 +13713,9 @@ dependencies = [ [[package]] name = "wiggle-generate" -version = "21.0.1" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36beda94813296ecaf0d91b7ada9da073fd41865ba339bdd3b7764e2e785b8e9" +checksum = "634b8804a67200bcb43ea8af5f7c53e862439a086b68b16fd333454bc74d5aab" dependencies = [ "anyhow", "heck 0.4.1", @@ -13334,9 +13728,9 @@ dependencies = [ [[package]] name = "wiggle-macro" -version = "21.0.1" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b47d2b4442ce93106dba5d1a9c59d5f85b5732878bb3d0598d3c93c0d01b16b" +checksum = "474b7cbdb942c74031e619d66c600bba7f73867c5800fc2c2306cf307649be2f" dependencies = [ "proc-macro2", "quote", @@ -13377,40 +13771,21 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "winch-codegen" -version = "0.19.1" +version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dc69899ccb2da7daa4df31426dcfd284b104d1a85e1dae35806df0c46187f87" +checksum = "c139fb9298d9651b6869afd544e567ca2448cd5f5ddcb24e4bb86a1ee187c8b3" dependencies = [ "anyhow", "cranelift-codegen", - "gimli 0.28.1", + "gimli 0.29.0", "regalloc2", "smallvec", "target-lexicon", - "wasmparser 0.207.0", + "wasmparser 0.215.0", "wasmtime-cranelift", "wasmtime-environ", ] -[[package]] -name = "windows" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" -dependencies = [ - "windows-targets 0.48.5", -] - -[[package]] -name = "windows" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" -dependencies = [ - "windows-core 0.52.0", - "windows-targets 0.52.6", -] - [[package]] name = "windows" version = "0.54.0" @@ -13485,6 +13860,17 @@ dependencies = [ "syn 2.0.76", ] +[[package]] 
+name = "windows-registry" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" +dependencies = [ + "windows-result 0.2.0", + "windows-strings", + "windows-targets 0.52.6", +] + [[package]] name = "windows-result" version = "0.1.2" @@ -13894,9 +14280,9 @@ dependencies = [ [[package]] name = "wit-parser" -version = "0.207.0" +version = "0.215.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78c83dab33a9618d86cfe3563cc864deffd08c17efc5db31a3b7cd1edeffe6e1" +checksum = "935a97eaffd57c3b413aa510f8f0b550a4a9fe7d59e79cd8b89a83dcb860321f" dependencies = [ "anyhow", "id-arena", @@ -13907,7 +14293,7 @@ dependencies = [ "serde_derive", "serde_json", "unicode-xid", - "wasmparser 0.207.0", + "wasmparser 0.215.0", ] [[package]] @@ -13950,6 +14336,8 @@ dependencies = [ "parking_lot", "postage", "project", + "release_channel", + "remote", "schemars", "serde", "serde_json", @@ -14066,7 +14454,7 @@ dependencies = [ [[package]] name = "xim" version = "0.4.0" -source = "git+https://github.com/npmania/xim-rs?rev=27132caffc5b9bc9c432ca4afad184ab6e7c16af#27132caffc5b9bc9c432ca4afad184ab6e7c16af" +source = "git+https://github.com/XDeme1/xim-rs?rev=d50d461764c2213655cd9cf65a0ea94c70d3c4fd#d50d461764c2213655cd9cf65a0ea94c70d3c4fd" dependencies = [ "ahash 0.8.11", "hashbrown 0.14.5", @@ -14079,7 +14467,7 @@ dependencies = [ [[package]] name = "xim-ctext" version = "0.3.0" -source = "git+https://github.com/npmania/xim-rs?rev=27132caffc5b9bc9c432ca4afad184ab6e7c16af#27132caffc5b9bc9c432ca4afad184ab6e7c16af" +source = "git+https://github.com/XDeme1/xim-rs?rev=d50d461764c2213655cd9cf65a0ea94c70d3c4fd#d50d461764c2213655cd9cf65a0ea94c70d3c4fd" dependencies = [ "encoding_rs", ] @@ -14087,7 +14475,7 @@ dependencies = [ [[package]] name = "xim-parser" version = "0.2.1" -source = "git+https://github.com/npmania/xim-rs?rev=27132caffc5b9bc9c432ca4afad184ab6e7c16af#27132caffc5b9bc9c432ca4afad184ab6e7c16af" +source = "git+https://github.com/XDeme1/xim-rs?rev=d50d461764c2213655cd9cf65a0ea94c70d3c4fd#d50d461764c2213655cd9cf65a0ea94c70d3c4fd" dependencies = [ "bitflags 2.6.0", ] @@ -14144,9 +14532,9 @@ dependencies = [ [[package]] name = "yansi" -version = "0.5.1" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" [[package]] name = "yazi" @@ -14229,13 +14617,14 @@ dependencies = [ [[package]] name = "zed" -version = "0.153.0" +version = "0.159.0" dependencies = [ "activity_indicator", "anyhow", "ashpd", "assets", "assistant", + "async-watch", "audio", "auto_update", "backtrace", @@ -14274,7 +14663,6 @@ dependencies = [ "image_viewer", "inline_completion_button", "install_cli", - "isahc", "journal", "language", "language_model", @@ -14302,6 +14690,7 @@ dependencies = [ "release_channel", "remote", "repl", + "reqwest_client", "rope", "search", "serde", @@ -14309,9 +14698,11 @@ dependencies = [ "session", "settings", "settings_ui", + "shellexpand 2.1.2", "simplelog", "smol", "snippet_provider", + "snippets_ui", "supermaven", "sysinfo", "tab_switcher", @@ -14335,6 +14726,7 @@ dependencies = [ "winresource", "workspace", "zed_actions", + "zstd", ] [[package]] @@ -14347,75 +14739,66 @@ dependencies = [ [[package]] name = "zed_astro" -version = "0.1.0" +version = "0.1.1" dependencies = [ "serde", - 
"zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_clojure" version = "0.0.3" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_csharp" version = "0.0.2" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_dart" -version = "0.0.3" +version = "0.1.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_deno" version = "0.0.2" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_elixir" -version = "0.0.9" +version = "0.1.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_elm" version = "0.0.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_emmet" version = "0.0.3" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_erlang" -version = "0.0.1" -dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "zed_extension_api" version = "0.1.0" dependencies = [ - "serde", - "serde_json", - "wit-bindgen", + "zed_extension_api 0.1.0", ] [[package]] @@ -14430,81 +14813,82 @@ dependencies = [ ] [[package]] -name = "zed_gleam" +name = "zed_extension_api" version = "0.2.0" dependencies = [ - "html_to_markdown 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "serde", + "serde_json", + "wit-bindgen", ] [[package]] name = "zed_glsl" version = "0.1.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_haskell" version = "0.1.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_html" -version = "0.1.2" +version = "0.1.3" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_lua" -version = "0.0.3" +version = "0.1.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_ocaml" -version = "0.0.2" +version = "0.1.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_php" -version = "0.1.3" +version = "0.2.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_prisma" version = "0.0.3" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] -name = "zed_purescript" -version = "0.0.1" +name = "zed_proto" +version = "0.2.0" 
dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] -name = "zed_ruby" -version = "0.2.0" +name = "zed_purescript" +version = "0.0.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_ruff" -version = "0.0.2" +version = "0.1.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] @@ -14512,57 +14896,42 @@ name = "zed_snippets" version = "0.0.5" dependencies = [ "serde_json", - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "zed_svelte" -version = "0.1.1" -dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_terraform" -version = "0.1.0" +version = "0.1.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_test_extension" version = "0.1.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.2.0", ] [[package]] name = "zed_toml" version = "0.1.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_uiua" version = "0.0.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "zed_vue" -version = "0.1.0" -dependencies = [ - "serde", - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_zig" -version = "0.3.0" +version = "0.3.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] @@ -14622,7 +14991,7 @@ dependencies = [ "async-std", "async-trait", "asynchronous-codec", - "bytes 1.7.1", + "bytes 1.7.2", "crossbeam-queue", "dashmap 5.5.3", "futures-channel", diff --git a/Cargo.toml b/Cargo.toml index a7553b956c44c8..332fcb5d0f0406 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,6 +27,7 @@ members = [ "crates/diagnostics", "crates/docs_preprocessor", "crates/editor", + "crates/evals", "crates/extension", "crates/extension_api", "crates/extension_cli", @@ -87,6 +88,7 @@ members = [ "crates/remote", "crates/remote_server", "crates/repl", + "crates/reqwest_client", "crates/rich_text", "crates/rope", "crates/rpc", @@ -98,6 +100,7 @@ members = [ "crates/settings_ui", "crates/snippet", "crates/snippet_provider", + "crates/snippets_ui", "crates/sqlez", "crates/sqlez_macros", "crates/story", @@ -119,6 +122,8 @@ members = [ "crates/title_bar", "crates/ui", "crates/ui_input", + "crates/ui_macros", + "crates/reqwest_client", "crates/util", "crates/vcs_menu", "crates/vim", @@ -141,7 +146,6 @@ members = [ "extensions/elm", "extensions/emmet", "extensions/erlang", - "extensions/gleam", "extensions/glsl", "extensions/haskell", "extensions/html", @@ -150,17 +154,15 @@ members = [ "extensions/php", "extensions/perplexity", "extensions/prisma", + "extensions/proto", "extensions/purescript", "extensions/ruff", - "extensions/ruby", "extensions/slash-commands-example", "extensions/snippets", - "extensions/svelte", "extensions/terraform", 
"extensions/test-extension", "extensions/toml", "extensions/uiua", - "extensions/vue", "extensions/zig", # @@ -172,6 +174,7 @@ members = [ default-members = ["crates/zed"] [workspace.dependencies] + # # Workspace member crates # @@ -215,9 +218,8 @@ git = { path = "crates/git" } git_hosting_providers = { path = "crates/git_hosting_providers" } go_to_line = { path = "crates/go_to_line" } google_ai = { path = "crates/google_ai" } -gpui = { path = "crates/gpui" } +gpui = { path = "crates/gpui", default-features = false, features = ["http_client"]} gpui_macros = { path = "crates/gpui_macros" } -handlebars = "4.3" headless = { path = "crates/headless" } helix = { path = "crates/helix" } html_to_markdown = { path = "crates/html_to_markdown" } @@ -262,6 +264,7 @@ release_channel = { path = "crates/release_channel" } remote = { path = "crates/remote" } remote_server = { path = "crates/remote_server" } repl = { path = "crates/repl" } +reqwest_client = { path = "crates/reqwest_client" } rich_text = { path = "crates/rich_text" } rope = { path = "crates/rope" } rpc = { path = "crates/rpc" } @@ -273,6 +276,7 @@ settings = { path = "crates/settings" } settings_ui = { path = "crates/settings_ui" } snippet = { path = "crates/snippet" } snippet_provider = { path = "crates/snippet_provider" } +snippets_ui = { path = "crates/snippets_ui" } sqlez = { path = "crates/sqlez" } sqlez_macros = { path = "crates/sqlez_macros" } story = { path = "crates/story" } @@ -294,6 +298,7 @@ time_format = { path = "crates/time_format" } title_bar = { path = "crates/title_bar" } ui = { path = "crates/ui" } ui_input = { path = "crates/ui_input" } +ui_macros = { path = "crates/ui_macros" } util = { path = "crates/util" } vcs_menu = { path = "crates/vcs_menu" } vim = { path = "crates/vim" } @@ -311,7 +316,9 @@ aho-corasick = "1.1" alacritty_terminal = { git = "https://github.com/alacritty/alacritty", rev = "91d034ff8b53867143c005acfaa14609147c9a2c" } any_vec = "0.14" anyhow = "1.0.86" +arrayvec = { version = "0.7.4", features = ["serde"] } ashpd = "0.9.1" +async-compat = "0.2.1" async-compression = { version = "0.4", features = ["gzip", "futures-io"] } async-dispatcher = "0.1" async-fs = "1.6" @@ -319,7 +326,7 @@ async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "8 async-recursion = "1.0.0" async-tar = "0.5.0" async-trait = "0.1" -async-tungstenite = "0.23" +async-tungstenite = "0.24" async-watch = "0.3.1" async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] } base64 = "0.22" @@ -327,12 +334,15 @@ bitflags = "2.6.0" blade-graphics = { git = "https://github.com/kvark/blade", rev = "e142a3a5e678eb6a13e642ad8401b1f3aa38e969" } blade-macros = { git = "https://github.com/kvark/blade", rev = "e142a3a5e678eb6a13e642ad8401b1f3aa38e969" } blade-util = { git = "https://github.com/kvark/blade", rev = "e142a3a5e678eb6a13e642ad8401b1f3aa38e969" } +blake3 = "1.5.3" +bytes = "1.0" cargo_metadata = "0.18" cargo_toml = "0.20" chrono = { version = "0.4", features = ["serde"] } clap = { version = "4.4", features = ["derive"] } clickhouse = "0.11.6" cocoa = "0.26" +convert_case = "0.6.0" core-foundation = "0.9.3" core-foundation-sys = "0.8.6" ctor = "0.2.6" @@ -348,18 +358,15 @@ futures-batch = "0.6.1" futures-lite = "1.13" git2 = { version = "0.19", default-features = false } globset = "0.4" +handlebars = "4.3" heed = { version = "0.20.1", features = ["read-txn-no-tls"] } hex = "0.4.3" -hyper = "0.14" html5ever = "0.27.0" +hyper = "0.14" ignore = "0.4.22" image = "0.25.1" indexmap = { version = "1.6.2", 
features = ["serde"] } indoc = "2" -# We explicitly disable http2 support in isahc. -isahc = { version = "1.7.2", default-features = false, features = [ - "text-decoding", -] } itertools = "0.13.0" jsonwebtoken = "9.3" libc = "0.2" @@ -374,9 +381,9 @@ ordered-float = "2.1.1" palette = { version = "0.7.5", default-features = false, features = ["std"] } parking_lot = "0.12.1" pathdiff = "0.2" -profiling = "1" postage = { version = "0.5", features = ["futures-traits"] } pretty_assertions = "1.3.0" +profiling = "1" prost = "0.9" prost-build = "0.9" prost-types = "0.9" @@ -384,18 +391,28 @@ pulldown-cmark = { version = "0.12.0", default-features = false } rand = "0.8.5" regex = "1.5" repair_json = "0.1.0" +reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f6998da16bbca97b6dddda9be7827c50e29", default-features = false, features = [ + "charset", + "http2", + "macos-system-configuration", + "rustls-tls-native-roots", + "socks", + "stream", +] } rsa = "0.9.6" runtimelib = { version = "0.15", default-features = false, features = [ "async-dispatcher-runtime", ] } rustc-demangle = "0.1.23" rust-embed = { version = "8.4", features = ["include-exclude"] } +rustls = "0.20.3" +rustls-native-certs = "0.8.0" schemars = { version = "0.8", features = ["impl_json_schema"] } semver = "1.0" serde = { version = "1.0", features = ["derive", "rc"] } serde_derive = { version = "1.0", features = ["deserialize_in_place"] } serde_json = { version = "1.0", features = ["preserve_order", "raw_value"] } -serde_json_lenient = { version = "0.1", features = [ +serde_json_lenient = { version = "0.2", features = [ "preserve_order", "raw_value", ] } @@ -408,11 +425,12 @@ similar = "1.3" simplelog = "0.12.2" smallvec = { version = "1.6", features = ["union"] } smol = "1.2" +sqlformat = "0.2" strsim = "0.11" strum = { version = "0.25.0", features = ["derive"] } subtle = "2.5.0" sys-locale = "0.3.1" -sysinfo = "0.30.7" +sysinfo = "0.31.0" tempfile = "3.9.0" thiserror = "1.0.29" tiktoken-rs = "0.5.9" @@ -425,50 +443,52 @@ time = { version = "0.3", features = [ ] } tiny_http = "0.8" toml = "0.8" -tokio = { version = "1", features = ["full"] } +tokio = { version = "1" } tower-http = "0.4.4" -tree-sitter = { version = "0.22", features = ["wasm"] } -tree-sitter-bash = "0.21" -tree-sitter-c = "0.21" -tree-sitter-cpp = "0.22" -tree-sitter-css = "0.21" -tree-sitter-elixir = "0.2" -tree-sitter-embedded-template = "0.20.0" -tree-sitter-go = "0.21" -tree-sitter-go-mod = { git = "https://github.com/camdencheek/tree-sitter-go-mod", rev = "1f55029bacd0a6a11f6eb894c4312d429dcf735c", package = "tree-sitter-gomod" } -tree-sitter-gowork = { git = "https://github.com/d1y/tree-sitter-go-work", rev = "dcbabff454703c3a4bc98a23cf8778d4be46fd22" } -tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "6dd0303acf7138dd2b9b432a229e16539581c701" } +tree-sitter = { version = "0.23", features = ["wasm"] } +tree-sitter-bash = "0.23" +tree-sitter-c = "0.23" +tree-sitter-cpp = "0.23" +tree-sitter-css = "0.23" +tree-sitter-elixir = "0.3" +tree-sitter-embedded-template = "0.23.0" +tree-sitter-go = "0.23" +tree-sitter-go-mod = { git = "https://github.com/zed-industries/tree-sitter-go-mod", rev = "a9aea5e358cde4d0f8ff20b7bc4fa311e359c7ca", package = "tree-sitter-gomod" } +tree-sitter-gowork = { git = "https://github.com/zed-industries/tree-sitter-go-work", rev = "acb0617bf7f4fda02c6217676cc64acb89536dc7" } +tree-sitter-heex = { git = "https://github.com/zed-industries/tree-sitter-heex", rev = 
"1dd45142fbb05562e35b2040c6129c9bca346592" } +tree-sitter-diff = "0.1.0" tree-sitter-html = "0.20" -tree-sitter-jsdoc = "0.21" -tree-sitter-json = "0.21" -tree-sitter-md = { git = "https://github.com/zed-industries/tree-sitter-markdown", rev = "e3855e37f8f2c71aa7513c18a9c95fb7461b1b10" } -protols-tree-sitter-proto = "0.2" -tree-sitter-python = "0.21" -tree-sitter-regex = "0.21" -tree-sitter-ruby = "0.21" -tree-sitter-rust = "0.21" -tree-sitter-typescript = "0.21" -tree-sitter-yaml = "0.6" -unindent = "0.1.7" +tree-sitter-jsdoc = "0.23" +tree-sitter-json = "0.23" +tree-sitter-md = { git = "https://github.com/zed-industries/tree-sitter-markdown", rev = "4cfa6aad6b75052a5077c80fd934757d9267d81b" } +tree-sitter-python = "0.23" +tree-sitter-regex = "0.23" +tree-sitter-ruby = "0.23" +tree-sitter-rust = "0.23" +tree-sitter-typescript = "0.23" +tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" } unicase = "2.6" +unindent = "0.1.7" unicode-segmentation = "1.10" url = "2.2" uuid = { version = "1.1.2", features = ["v4", "v5", "serde"] } -wasmparser = "0.201" -wasm-encoder = "0.201" -wasmtime = { version = "21.0.1", default-features = false, features = [ +wasmparser = "0.215" +wasm-encoder = "0.215" +wasmtime = { version = "24", default-features = false, features = [ "async", "demangle", "runtime", "cranelift", "component-model", ] } -wasmtime-wasi = "21.0.1" +wasmtime-wasi = "24" which = "6.0.0" wit-component = "0.201" +zstd = "0.11" [workspace.dependencies.async-stripe] -version = "0.39" +git = "https://github.com/zed-industries/async-stripe" +rev = "3672dd4efb7181aa597bf580bf5a2f5d23db6735" default-features = false features = [ "runtime-tokio-hyper-rustls", @@ -485,7 +505,7 @@ version = "0.58" features = [ "implement", "Foundation_Numerics", - "System", + "Storage", "System_Threading", "UI_ViewManagement", "Wdk_System_SystemServices", @@ -516,13 +536,10 @@ features = [ "Win32_UI_Input_Ime", "Win32_UI_Input_KeyboardAndMouse", "Win32_UI_Shell", + "Win32_UI_Shell_Common", "Win32_UI_WindowsAndMessaging", ] -[patch.crates-io] -# Patch Tree-sitter for updated wasmtime. -tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "7f4a57817d58a2f134fe863674acad6bbf007228" } - [profile.dev] split-debuginfo = "unpacked" debug = "limited" diff --git a/Cross.toml b/Cross.toml new file mode 100644 index 00000000000000..b5f0f1103af2ba --- /dev/null +++ b/Cross.toml @@ -0,0 +1,2 @@ +[build] +dockerfile = "Dockerfile-cross" diff --git a/Dockerfile b/Dockerfile-collab similarity index 81% rename from Dockerfile rename to Dockerfile-collab index 70c47c9f671e18..507ad3be19bc00 100644 --- a/Dockerfile +++ b/Dockerfile-collab @@ -4,11 +4,19 @@ FROM rust:1.81-bookworm as builder WORKDIR app COPY . . +# Replace the Cargo configuration with the one used by collab. +COPY ./.cargo/collab-config.toml ./.cargo/config.toml + # Compile collab server ARG CARGO_PROFILE_RELEASE_PANIC=abort ARG GITHUB_SHA ENV GITHUB_SHA=$GITHUB_SHA + +# Also add `cmake`, since we need it to build `wasmtime`. 
+RUN apt-get update; \ + apt-get install -y --no-install-recommends cmake + RUN --mount=type=cache,target=./script/node_modules \ --mount=type=cache,target=/usr/local/cargo/registry \ --mount=type=cache,target=/usr/local/cargo/git \ diff --git a/.dockerignore b/Dockerfile-collab.dockerignore similarity index 100% rename from .dockerignore rename to Dockerfile-collab.dockerignore diff --git a/Dockerfile-cross b/Dockerfile-cross new file mode 100644 index 00000000000000..488309641caed5 --- /dev/null +++ b/Dockerfile-cross @@ -0,0 +1,17 @@ +# syntax=docker/dockerfile:1 + +ARG CROSS_BASE_IMAGE +FROM ${CROSS_BASE_IMAGE} +WORKDIR /app +ARG TZ=Etc/UTC \ + LANG=C.UTF-8 \ + LC_ALL=C.UTF-8 \ + DEBIAN_FRONTEND=noninteractive +ENV CARGO_TERM_COLOR=always + +COPY script/install-mold script/ +RUN ./script/install-mold "2.34.0" +COPY script/remote-server script/ +RUN ./script/remote-server + +COPY . . diff --git a/Dockerfile-cross.dockerignore b/Dockerfile-cross.dockerignore new file mode 100644 index 00000000000000..337b4d42623c48 --- /dev/null +++ b/Dockerfile-cross.dockerignore @@ -0,0 +1,16 @@ +.git +.github +**/.gitignore +**/.gitkeep +.gitattributes +.mailmap +**/target +zed.xcworkspace +.DS_Store +compose.yml +plugins/bin +script/node_modules +styles/node_modules +crates/collab/static/styles.css +vendor/bin +assets/themes/ diff --git a/Dockerfile-distros b/Dockerfile-distros new file mode 100644 index 00000000000000..c8a98d2f7db9bd --- /dev/null +++ b/Dockerfile-distros @@ -0,0 +1,26 @@ +# syntax=docker/dockerfile:1 + +ARG BASE_IMAGE +FROM ${BASE_IMAGE} +WORKDIR /app +ARG TZ=Etc/UTC \ + LANG=C.UTF-8 \ + LC_ALL=C.UTF-8 \ + DEBIAN_FRONTEND=noninteractive +ENV CARGO_TERM_COLOR=always + +COPY script/linux script/ +RUN ./script/linux +COPY script/install-mold script/install-cmake script/ +RUN ./script/install-mold "2.34.0" +RUN ./script/install-cmake "3.30.4" + +COPY . . + +# When debugging, make these into individual RUN statements. +# Cleanup to avoid saving big layers we aren't going to use. +RUN . 
"$HOME/.cargo/env" \ + && cargo fetch \ + && cargo build \ + && cargo run -- --help \ + && cargo clean --quiet diff --git a/Dockerfile-distros.dockerignore b/Dockerfile-distros.dockerignore new file mode 100644 index 00000000000000..de70e0d16772e2 --- /dev/null +++ b/Dockerfile-distros.dockerignore @@ -0,0 +1,2 @@ +**/target +**/node_modules diff --git a/assets/icons/audio_off.svg b/assets/icons/audio_off.svg new file mode 100644 index 00000000000000..93b98471ca1a15 --- /dev/null +++ b/assets/icons/audio_off.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/audio_on.svg b/assets/icons/audio_on.svg new file mode 100644 index 00000000000000..42310ea32c289e --- /dev/null +++ b/assets/icons/audio_on.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/case_insensitive.svg b/assets/icons/case_sensitive.svg similarity index 100% rename from assets/icons/case_insensitive.svg rename to assets/icons/case_sensitive.svg diff --git a/assets/icons/x.svg b/assets/icons/close.svg similarity index 100% rename from assets/icons/x.svg rename to assets/icons/close.svg diff --git a/assets/icons/text-cursor.svg b/assets/icons/cursor_i_beam.svg similarity index 100% rename from assets/icons/text-cursor.svg rename to assets/icons/cursor_i_beam.svg diff --git a/assets/icons/diff.svg b/assets/icons/diff.svg new file mode 100644 index 00000000000000..ca43c379daa63c --- /dev/null +++ b/assets/icons/diff.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/feedback.svg b/assets/icons/envelope.svg similarity index 100% rename from assets/icons/feedback.svg rename to assets/icons/envelope.svg diff --git a/assets/icons/file_doc.svg b/assets/icons/file_doc.svg new file mode 100644 index 00000000000000..3b11995f36759e --- /dev/null +++ b/assets/icons/file_doc.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/file_generic.svg b/assets/icons/file_generic.svg new file mode 100644 index 00000000000000..3c72bd3320d9e8 --- /dev/null +++ b/assets/icons/file_generic.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/file_git.svg b/assets/icons/file_git.svg new file mode 100644 index 00000000000000..197db2e9e60f26 --- /dev/null +++ b/assets/icons/file_git.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/file_icons/file_types.json b/assets/icons/file_icons/file_types.json index fd0dc448c4e54f..bf95879b88a0e6 100644 --- a/assets/icons/file_icons/file_types.json +++ b/assets/icons/file_icons/file_types.json @@ -20,6 +20,7 @@ "bashrc": "terminal", "bmp": "image", "c": "c", + "c++": "cpp", "cc": "cpp", "cjs": "javascript", "coffee": "coffeescript", @@ -27,6 +28,7 @@ "cpp": "cpp", "css": "css", "csv": "storage", + "cxx": "cpp", "cts": "typescript", "dart": "dart", "dat": "storage", @@ -66,11 +68,13 @@ "heex": "elixir", "heic": "image", "heif": "image", + "hh": "cpp", "hpp": "cpp", "hrl": "erlang", "hs": "haskell", "htm": "template", "html": "template", + "hxx": "cpp", "ib": "storage", "ico": "image", "ini": "settings", @@ -124,6 +128,7 @@ "php": "php", "plist": "template", "png": "image", + "postcss": "css", "ppt": "document", "pptx": "document", "prettierignore": "prettier", diff --git a/assets/icons/file_lock.svg b/assets/icons/file_lock.svg new file mode 100644 index 00000000000000..6bfef249b4516f --- /dev/null +++ b/assets/icons/file_lock.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_rust.svg b/assets/icons/file_rust.svg new file mode 100644 index 00000000000000..5db753628af10c --- /dev/null +++ b/assets/icons/file_rust.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_toml.svg b/assets/icons/file_toml.svg 
new file mode 100644 index 00000000000000..9ab78af50f9302 --- /dev/null +++ b/assets/icons/file_toml.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/project.svg b/assets/icons/file_tree.svg similarity index 100% rename from assets/icons/project.svg rename to assets/icons/file_tree.svg diff --git a/assets/icons/folder.svg b/assets/icons/folder.svg new file mode 100644 index 00000000000000..a76dc63d1a6639 --- /dev/null +++ b/assets/icons/folder.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/folder_open.svg b/assets/icons/folder_open.svg new file mode 100644 index 00000000000000..ef37f55f83a38f --- /dev/null +++ b/assets/icons/folder_open.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/stop_sharing.svg b/assets/icons/folder_x.svg similarity index 100% rename from assets/icons/stop_sharing.svg rename to assets/icons/folder_x.svg diff --git a/assets/icons/conversations.svg b/assets/icons/message_bubbles.svg similarity index 100% rename from assets/icons/conversations.svg rename to assets/icons/message_bubbles.svg diff --git a/assets/icons/desktop.svg b/assets/icons/screen.svg similarity index 100% rename from assets/icons/desktop.svg rename to assets/icons/screen.svg diff --git a/assets/icons/settings.svg b/assets/icons/settings.svg new file mode 100644 index 00000000000000..081d25bf482472 --- /dev/null +++ b/assets/icons/settings.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/sliders-alt.svg b/assets/icons/settings_alt.svg similarity index 100% rename from assets/icons/sliders-alt.svg rename to assets/icons/settings_alt.svg diff --git a/assets/icons/sliders_alt.svg b/assets/icons/sliders_alt.svg new file mode 100644 index 00000000000000..36c3feccfede20 --- /dev/null +++ b/assets/icons/sliders_alt.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/sliders_vertical.svg b/assets/icons/sliders_vertical.svg new file mode 100644 index 00000000000000..ab61037a513e8c --- /dev/null +++ b/assets/icons/sliders_vertical.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/assets/icons/speaker_off.svg b/assets/icons/speaker_off.svg deleted file mode 100644 index f60c35de7f3f5b..00000000000000 --- a/assets/icons/speaker_off.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - diff --git a/assets/icons/text_select.svg b/assets/icons/text_snippet.svg similarity index 100% rename from assets/icons/text_select.svg rename to assets/icons/text_snippet.svg diff --git a/assets/icons/trash_alt.svg b/assets/icons/trash_alt.svg new file mode 100644 index 00000000000000..6867b421475a6e --- /dev/null +++ b/assets/icons/trash_alt.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/user_group_16.svg b/assets/icons/user_group.svg similarity index 100% rename from assets/icons/user_group_16.svg rename to assets/icons/user_group.svg diff --git a/assets/icons/word_search.svg b/assets/icons/whole_word.svg similarity index 100% rename from assets/icons/word_search.svg rename to assets/icons/whole_word.svg diff --git a/assets/icons/error.svg b/assets/icons/x_circle.svg similarity index 100% rename from assets/icons/error.svg rename to assets/icons/x_circle.svg diff --git a/assets/images/zed_logo.svg b/assets/images/zed_logo.svg new file mode 100644 index 00000000000000..d1769449c19840 --- /dev/null +++ b/assets/images/zed_logo.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/assets/images/zed_x_copilot.svg b/assets/images/zed_x_copilot.svg new file mode 100644 index 00000000000000..3c5be71074c195 --- /dev/null +++ b/assets/images/zed_x_copilot.svg @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git 
a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index bd2ade4246b17c..fca38a45a436fc 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -56,6 +56,7 @@ "shift-tab": "editor::TabPrev", "ctrl-k": "editor::CutToEndOfLine", // "ctrl-t": "editor::Transpose", + "alt-q": "editor::Rewrap", "ctrl-backspace": "editor::DeleteToPreviousWordStart", "ctrl-delete": "editor::DeleteToNextWordEnd", "shift-delete": "editor::Cut", @@ -165,6 +166,7 @@ { "context": "AssistantPanel", "bindings": { + "ctrl-k c": "assistant::CopyCode", "ctrl-g": "search::SelectNextMatch", "ctrl-shift-g": "search::SelectPrevMatch", "alt-m": "assistant::ToggleModelSelector", @@ -194,7 +196,7 @@ } }, { - "context": "BufferSearchBar && in_replace", + "context": "BufferSearchBar && in_replace > Editor", "bindings": { "enter": "search::ReplaceNext", "ctrl-enter": "search::ReplaceAll" @@ -245,6 +247,8 @@ "bindings": { "ctrl-pageup": "pane::ActivatePrevItem", "ctrl-pagedown": "pane::ActivateNextItem", + "ctrl-shift-pageup": "pane::SwapItemLeft", + "ctrl-shift-pagedown": "pane::SwapItemRight", "ctrl-w": "pane::CloseActiveItem", "ctrl-f4": "pane::CloseActiveItem", "alt-ctrl-t": "pane::CloseInactiveItems", @@ -306,6 +310,11 @@ "ctrl-shift-\\": "editor::MoveToEnclosingBracket", "ctrl-shift-[": "editor::Fold", "ctrl-shift-]": "editor::UnfoldLines", + "ctrl-k ctrl-l": "editor::ToggleFold", + "ctrl-k ctrl-[": "editor::FoldRecursive", + "ctrl-k ctrl-]": "editor::UnfoldRecursive", + "ctrl-k ctrl-0": "editor::FoldAll", + "ctrl-k ctrl-j": "editor::UnfoldAll", "ctrl-space": "editor::ShowCompletions", "ctrl-.": "editor::ToggleCodeActions", "alt-ctrl-r": "editor::RevealInFileManager", @@ -516,6 +525,13 @@ "alt-enter": "editor::Newline" } }, + { + "context": "PromptEditor", + "bindings": { + "ctrl-[": "assistant::CyclePreviousInlineAssist", + "ctrl-]": "assistant::CycleNextInlineAssist" + } + }, { "context": "ProjectSearchBar && !in_replace", "bindings": { @@ -553,6 +569,7 @@ "ctrl-backspace": ["project_panel::Delete", { "skip_prompt": false }], "ctrl-delete": ["project_panel::Delete", { "skip_prompt": false }], "alt-ctrl-r": "project_panel::RevealInFileManager", + "ctrl-shift-enter": "project_panel::OpenWithSystem", "alt-shift-f": "project_panel::NewSearchInDirectory", "shift-down": "menu::SelectNext", "shift-up": "menu::SelectPrev", @@ -647,7 +664,8 @@ "shift-up": "terminal::ScrollLineUp", "shift-down": "terminal::ScrollLineDown", "shift-home": "terminal::ScrollToTop", - "shift-end": "terminal::ScrollToBottom" + "shift-end": "terminal::ScrollToBottom", + "ctrl-shift-space": "terminal::ToggleViMode" } }, { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index dec5cbd9f398b9..c39b7c06daed59 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -51,6 +51,7 @@ "shift-tab": "editor::TabPrev", "ctrl-k": "editor::CutToEndOfLine", "ctrl-t": "editor::Transpose", + "alt-q": "editor::Rewrap", "cmd-backspace": "editor::DeleteToBeginningOfLine", "cmd-delete": "editor::DeleteToEndOfLine", "alt-backspace": "editor::DeleteToPreviousWordStart", @@ -187,6 +188,7 @@ { "context": "AssistantPanel", "bindings": { + "cmd-k c": "assistant::CopyCode", "cmd-g": "search::SelectNextMatch", "cmd-shift-g": "search::SelectPrevMatch", "alt-m": "assistant::ToggleModelSelector", @@ -230,7 +232,7 @@ } }, { - "context": "BufferSearchBar && in_replace", + "context": "BufferSearchBar && in_replace > Editor", "bindings": { "enter": "search::ReplaceNext", 
"cmd-enter": "search::ReplaceAll" @@ -285,6 +287,8 @@ "cmd-}": "pane::ActivateNextItem", "alt-cmd-left": "pane::ActivatePrevItem", "alt-cmd-right": "pane::ActivateNextItem", + "ctrl-shift-pageup": "pane::SwapItemLeft", + "ctrl-shift-pagedown": "pane::SwapItemRight", "cmd-w": "pane::CloseActiveItem", "alt-cmd-t": "pane::CloseInactiveItems", "ctrl-alt-cmd-w": "workspace::CloseInactiveTabsAndPanes", @@ -343,6 +347,11 @@ "cmd-shift-\\": "editor::MoveToEnclosingBracket", "alt-cmd-[": "editor::Fold", "alt-cmd-]": "editor::UnfoldLines", + "cmd-k cmd-l": "editor::ToggleFold", + "cmd-k cmd-[": "editor::FoldRecursive", + "cmd-k cmd-]": "editor::UnfoldRecursive", + "cmd-k cmd-0": "editor::FoldAll", + "cmd-k cmd-j": "editor::UnfoldAll", "ctrl-space": "editor::ShowCompletions", "cmd-.": "editor::ToggleCodeActions", "alt-cmd-r": "editor::RevealInFileManager", @@ -386,6 +395,7 @@ // Change the default action on `menu::Confirm` by setting the parameter // "alt-cmd-o": ["projects::OpenRecent", {"create_new_window": true }], "alt-cmd-o": "projects::OpenRecent", + "ctrl-cmd-o": "projects::OpenRemote", "alt-cmd-b": "branches::OpenRecent", "ctrl-~": "workspace::NewTerminal", "cmd-s": "workspace::Save", @@ -431,7 +441,12 @@ "cmd-k shift-right": ["workspace::SwapPaneInDirection", "Right"], "cmd-k shift-up": ["workspace::SwapPaneInDirection", "Up"], "cmd-k shift-down": ["workspace::SwapPaneInDirection", "Down"], - "cmd-shift-x": "zed::Extensions", + "cmd-shift-x": "zed::Extensions" + } + }, + { + "context": "Workspace && !Terminal", + "bindings": { "alt-t": "task::Rerun", "alt-shift-t": "task::Spawn" } @@ -523,6 +538,13 @@ "ctrl-enter": "assistant::InlineAssist" } }, + { + "context": "PromptEditor", + "bindings": { + "ctrl-[": "assistant::CyclePreviousInlineAssist", + "ctrl-]": "assistant::CycleNextInlineAssist" + } + }, { "context": "ProjectSearchBar && !in_replace", "bindings": { @@ -563,8 +585,8 @@ "cmd-backspace": ["project_panel::Trash", { "skip_prompt": true }], "cmd-delete": ["project_panel::Delete", { "skip_prompt": false }], "alt-cmd-r": "project_panel::RevealInFileManager", + "ctrl-shift-enter": "project_panel::OpenWithSystem", "cmd-alt-backspace": ["project_panel::Delete", { "skip_prompt": false }], - "alt-shift-f": "project_panel::NewSearchInDirectory", "shift-down": "menu::SelectNext", "shift-up": "menu::SelectPrev", @@ -657,7 +679,8 @@ "cmd-home": "terminal::ScrollToTop", "cmd-end": "terminal::ScrollToBottom", "shift-home": "terminal::ScrollToTop", - "shift-end": "terminal::ScrollToBottom" + "shift-end": "terminal::ScrollToBottom", + "ctrl-shift-space": "terminal::ToggleViMode" } } ] diff --git a/assets/keymaps/macos/jetbrains.json b/assets/keymaps/macos/jetbrains.json index 1d5f1181f48a6b..f3c69a76fb6a2f 100644 --- a/assets/keymaps/macos/jetbrains.json +++ b/assets/keymaps/macos/jetbrains.json @@ -34,7 +34,7 @@ "cmd-]": "pane::GoForward", "alt-f7": "editor::FindAllReferences", "cmd-alt-f7": "editor::FindAllReferences", - "cmd-b": "editor::GoToDefinition", + "cmd-b": "editor::GoToDefinition", // Conflicts with workspace::ToggleLeftDock "cmd-alt-b": "editor::GoToDefinitionSplit", "cmd-shift-b": "editor::GoToTypeDefinition", "cmd-alt-shift-b": "editor::GoToTypeDefinitionSplit", @@ -64,7 +64,8 @@ "cmd-shift-o": "file_finder::Toggle", "cmd-shift-a": "command_palette::Toggle", "shift shift": "command_palette::Toggle", - "cmd-alt-o": "project_symbols::Toggle", + "cmd-alt-o": "project_symbols::Toggle", // JetBrains: Go to Symbol + "cmd-o": "project_symbols::Toggle", // JetBrains: Go to Class "cmd-1": 
"workspace::ToggleLeftDock", "cmd-6": "diagnostics::Deploy" } diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index f863e8488a0e28..8b2a728df3fccf 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -128,13 +128,23 @@ "shift-m": "vim::WindowMiddle", "shift-l": "vim::WindowBottom", // z commands + "z enter": ["workspace::SendKeystrokes", "z t ^"], + "z -": ["workspace::SendKeystrokes", "z b ^"], + "z ^": ["workspace::SendKeystrokes", "shift-h k z b ^"], + "z +": ["workspace::SendKeystrokes", "shift-l j z t ^"], "z t": "editor::ScrollCursorTop", "z z": "editor::ScrollCursorCenter", "z .": ["workspace::SendKeystrokes", "z z ^"], "z b": "editor::ScrollCursorBottom", + "z a": "editor::ToggleFold", + "z A": "editor::ToggleFoldRecursive", "z c": "editor::Fold", + "z C": "editor::FoldRecursive", "z o": "editor::UnfoldLines", + "z O": "editor::UnfoldRecursive", "z f": "editor::FoldSelectedRanges", + "z M": "editor::FoldAll", + "z R": "editor::UnfoldAll", "shift-z shift-q": ["pane::CloseActiveItem", { "saveIntent": "skip" }], "shift-z shift-z": ["pane::CloseActiveItem", { "saveIntent": "saveAll" }], // Count support @@ -239,11 +249,14 @@ "g shift-u": ["vim::PushOperator", "Uppercase"], "g ~": ["vim::PushOperator", "OppositeCase"], "\"": ["vim::PushOperator", "Register"], + "g q": ["vim::PushOperator", "Rewrap"], + "g w": ["vim::PushOperator", "Rewrap"], "q": "vim::ToggleRecord", "shift-q": "vim::ReplayLastRecording", "@": ["vim::PushOperator", "ReplayRegister"], "ctrl-pagedown": "pane::ActivateNextItem", "ctrl-pageup": "pane::ActivatePrevItem", + "insert": "vim::InsertBefore", // tree-sitter related commands "[ x": "editor::SelectLargerSyntaxNode", "] x": "editor::SelectSmallerSyntaxNode", @@ -290,6 +303,8 @@ "g ctrl-x": ["vim::Decrement", { "step": true }], "shift-i": "vim::InsertBefore", "shift-a": "vim::InsertAfter", + "g I": "vim::VisualInsertFirstNonWhiteSpace", + "g A": "vim::VisualInsertEndOfLine", "shift-j": "vim::JoinLines", "r": ["vim::PushOperator", "Replace"], "ctrl-c": ["vim::SwitchMode", "Normal"], @@ -300,6 +315,7 @@ "i": ["vim::PushOperator", { "Object": { "around": false } }], "a": ["vim::PushOperator", { "Object": { "around": true } }], "g c": "vim::ToggleComments", + "g q": "vim::Rewrap", "\"": ["vim::PushOperator", "Register"], // tree-sitter related commands "[ x": "editor::SelectLargerSyntaxNode", @@ -323,7 +339,8 @@ "ctrl-t": "vim::Indent", "ctrl-d": "vim::Outdent", "ctrl-k": ["vim::PushOperator", { "Digraph": {} }], - "ctrl-r": ["vim::PushOperator", "Register"] + "ctrl-r": ["vim::PushOperator", "Register"], + "insert": "vim::ToggleReplace" } }, { @@ -342,7 +359,8 @@ "ctrl-k": ["vim::PushOperator", { "Digraph": {} }], "backspace": "vim::UndoReplace", "tab": "vim::Tab", - "enter": "vim::Enter" + "enter": "vim::Enter", + "insert": "vim::InsertBefore" } }, { @@ -427,6 +445,15 @@ "~": "vim::CurrentLine" } }, + { + "context": "vim_operator == gq", + "bindings": { + "g q": "vim::CurrentLine", + "q": "vim::CurrentLine", + "g w": "vim::CurrentLine", + "w": "vim::CurrentLine" + } + }, { "context": "vim_operator == y", "bindings": { @@ -493,6 +520,7 @@ "v": "project_panel::OpenPermanent", "p": "project_panel::Open", "x": "project_panel::RevealInFileManager", + "s": "project_panel::OpenWithSystem", "shift-g": "menu::SelectLast", "g g": "menu::SelectFirst", "-": "project_panel::SelectParent", diff --git a/assets/prompts/content_prompt.hbs b/assets/prompts/content_prompt.hbs index cf4141349b356c..c029f84b24c36e 100644 --- a/assets/prompts/content_prompt.hbs 
+++ b/assets/prompts/content_prompt.hbs @@ -47,6 +47,20 @@ And here's the section to rewrite based on that prompt again for reference: {{{rewrite_section}}} + +{{#if diagnostic_errors}} +{{#each diagnostic_errors}} + +Below are the diagnostic errors visible to the user. If the user requests problems to be fixed, use this information, but do not try to fix these errors if the user hasn't asked you to. + + + {{line_number}} + {{error_message}} + {{code_content}} + +{{/each}} +{{/if}} + {{/if}} Only make changes that are necessary to fulfill the prompt, leave everything else as-is. All surrounding {{content_type}} will be preserved. diff --git a/assets/prompts/edit_workflow.hbs b/assets/prompts/edit_workflow.hbs index c558bc20d0e143..99a594cdd88e4c 100644 --- a/assets/prompts/edit_workflow.hbs +++ b/assets/prompts/edit_workflow.hbs @@ -1,85 +1,33 @@ -# Code Change Workflow - -Your task is to guide the user through code changes using a series of steps. Each step should describe a high-level change, which can consist of multiple edits to distinct locations in the codebase. - -## Output Example - -Provide output as XML, with the following format: - - -Update the Person struct to store an age - -```rust -struct Person { - // existing fields... - age: u8, - height: f32, - // existing fields... -} - -impl Person { - fn age(&self) -> u8 { - self.age - } -} -``` - - -src/person.rs -insert_before -height: f32, -Add the age field - - - -src/person.rs -insert_after -impl Person { -Add the age getter - - - -## Output Format - -First, each `` must contain a written description of the change that should be made. The description should begin with a high-level overview, and can contain markdown code blocks as well. The description should be self-contained and actionable. - -After the description, each `` must contain one or more `` tags, each of which refer to a specific range in a source file. Each `` tag must contain the following child tags: - -### `` (required) - -This tag contains the path to the file that will be changed. It can be an existing path, or a path that should be created. - -### `` (optional) - -This tag contains a search string to locate in the source file, e.g. `pub fn baz() {`. If not provided, the new content will be inserted at the top of the file. Make sure to produce a string that exists in the source file and that isn't ambiguous. When there's ambiguity, add more lines to the search to eliminate it. - -### `` (required) - -This tag contains a single-line description of the edit that should be made at the given location. - -### `` (required) - -This tag indicates what type of change should be made, relative to the given location. It can be one of the following: -- `update`: Rewrites the specified string entirely based on the given description. -- `create`: Creates a new file with the given path based on the provided description. -- `insert_before`: Inserts new text based on the given description before the specified search string. -- `insert_after`: Inserts new text based on the given description after the specified search string. -- `delete`: Deletes the specified string from the containing file. +The user of a code editor wants to make a change to their codebase. +You must describe the change using the following XML structure: + +- - A group of related code changes. + Child tags: + - (required) - A high-level description of the changes. This should be as short + as possible, possibly using common abbreviations. 
+ - <edit> (1 or more) - An edit to make at a particular range within a file. + Includes the following child tags: + - <path> (required) - The path to the file that will be changed. + - <description> (optional) - An arbitrarily-long comment that describes the purpose + of this edit. + - <old_text> (optional) - An excerpt from the file's current contents that uniquely + identifies a range within the file where the edit should occur. If this tag is not + specified, then the entire file will be used as the range. + - <new_text> (required) - The new text to insert into the file. + - <operation> (required) - The type of change that should occur at the given range + of the file. Must be one of the following values: + - `update`: Replaces the entire range with the new text. + - `insert_before`: Inserts the new text before the range. + - `insert_after`: Inserts new text after the range. + - `create`: Creates a new file with the given path and the new text. + - `delete`: Deletes the specified range from the file. <guidelines> -- There's no need to describe *what* to do, just *where* to do it. -- Only reference locations that actually exist (unless you're creating a file). -- If creating a file, assume any subsequent updates are included at the time of creation. -- Don't create and then update a file. Always create new files in one hot. -- Prefer multiple edits to smaller regions, as opposed to one big edit to a larger region. -- Don't produce edits that intersect each other. In that case, merge them into a bigger edit. -- Never nest an edit with another edit. Never include CDATA. All edits are leaf nodes. -- Descriptions are required for all edits except delete. -- When generating multiple edits, ensure the descriptions are specific to each individual operation. -- Avoid referring to the search string in the description. Focus on the change to be made, not the location where it's made. That's implicit with the `search` string you provide. -- Don't generate multiple edits at the same location. Instead, combine them together in a single edit with a succinct combined description. +- Never provide multiple edits whose ranges intersect each other. Instead, merge them into one edit. +- Prefer multiple edits to smaller, disjoint ranges, rather than one edit to a larger range. +- There's no need to escape angle brackets within XML tags. - Always ensure imports are added if you're referencing symbols that are not in scope. </guidelines> @@ -124,189 +72,137 @@ Update all shapes to store their origin as an (x, y) tuple and implement Display <message role="assistant"> We'll need to update both the rectangle and circle modules. -<step> -Add origin fields to both shape types. - -```rust -struct Rectangle { - // existing fields ... - origin: (f64, f64), -} -``` - -```rust -struct Circle { - // existing fields ... 
- origin: (f64, f64), -} -``` - +<patch> +<title>Add origins and display impls to shapes src/shapes/rectangle.rs -insert_before - - width: f64, - height: f64, - -Add the origin field to Rectangle +Add the origin field to Rectangle struct +insert_after + +pub struct Rectangle { + + +origin: (f64, f64), + -src/shapes/circle.rs -insert_before - - radius: f64, - -Add the origin field to Circle +src/shapes/rectangle.rs +Update the Rectangle's new function to take an origin parameter +update + +fn new(width: f64, height: f64) -> Self { + Rectangle { width, height } +} + + +fn new(origin: (f64, f64), width: f64, height: f64) -> Self { + Rectangle { origin, width, height } +} + - -Update both shape's constructors to take an origin. - -src/shapes/rectangle.rs -update - - fn new(width: f64, height: f64) -> Self { - Rectangle { width, height } - } - -Update the Rectangle new function to take an origin +src/shapes/circle.rs +Add the origin field to Circle struct +insert_after + +pub struct Circle { + radius: f64, + + + origin: (f64, f64), + src/shapes/circle.rs +Update the Circle's new function to take an origin parameter update - - fn new(radius: f64) -> Self { - Circle { radius } - } - -Update the Circle new function to take an origin + +fn new(radius: f64) -> Self { + Circle { radius } +} + + +fn new(origin: (f64, f64), radius: f64) -> Self { + Circle { origin, radius } +} + - -Implement Display for both shapes - src/shapes/rectangle.rs +Add an import for the std::fmt module insert_before - + struct Rectangle { - -Add an import for the `std::fmt` module + + +use std::fmt; + + src/shapes/rectangle.rs +Add a Display implementation for Rectangle insert_after - + Rectangle { width, height } } } - -Add a Display implementation for Rectangle + + +impl fmt::Display for Rectangle { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.format_struct(f, "Rectangle") + .field("origin", &self.origin) + .field("width", &self.width) + .field("height", &self.height) + .finish() + } +} + src/shapes/circle.rs +Add an import for the `std::fmt` module insert_before - + struct Circle { - -Add an import for the `std::fmt` module + + +use std::fmt; + src/shapes/circle.rs +Add a Display implementation for Circle insert_after - + Circle { radius } } } - -Add a Display implementation for Circle - - - - - - - - -```rs src/user.rs -struct User { - pub name: String, - age: u32, - email: String, -} - -impl User { - fn new(name: String, age: u32, email: String) -> Self { - User { name, age, email } - } - - pub fn print_info(&self) { - todo!() - } -} -``` - -Let's print all the user information and delete the email field. - - - - -Update the 'print_info' method to print user information - -```rust -impl User { - // ... other methods ... - - pub fn print_info(&self) { - println!("Name: {name}, Age: {age}", name = self.name, age = self.age); + + +impl fmt::Display for Rectangle { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.format_struct(f, "Rectangle") + .field("origin", &self.origin) + .field("width", &self.width) + .field("height", &self.height) + .finish() } } -``` - - -src/user.rs -update - - pub fn print_info(&self) { - todo!() - } - -Print all the user information - - - - -Remove the 'email' field from the User struct - - -src/user.rs -delete - -email: String, - + + - -src/user.rs -update - -fn new(name: String, age: u32, email: String) -> Self { - User { name, age, email } -} - -Remove email parameter from new method - - -You should think step by step. 
When possible, produce smaller, coherent logical steps as opposed to one big step that combines lots of heterogeneous edits. - diff --git a/assets/prompts/project_slash_command.hbs b/assets/prompts/project_slash_command.hbs new file mode 100644 index 00000000000000..6c63f71d895274 --- /dev/null +++ b/assets/prompts/project_slash_command.hbs @@ -0,0 +1,8 @@ +A software developer is asking a question about their project. The source files in their project have been indexed into a database of semantic text embeddings. +Your task is to generate a list of 4 diverse search queries that can be run on this embedding database, in order to retrieve a list of code snippets +that are relevant to the developer's question. Redundant search queries will be heavily penalized, so only include another query if it's sufficiently +distinct from previous ones. + +Here is the question that's been asked, together with context that the developer has added manually: + +{{{context_buffer}}} diff --git a/assets/prompts/step_resolution.hbs b/assets/prompts/step_resolution.hbs deleted file mode 100644 index 10bbdec81e9d51..00000000000000 --- a/assets/prompts/step_resolution.hbs +++ /dev/null @@ -1,496 +0,0 @@ - -Your task is to map a step from a workflow to locations in source code where code needs to be changed to fulfill that step. -Given a workflow containing background context plus a series of tags, you will resolve *one* of these step tags to resolve to one or more locations in the code. -With each location, you will produce a brief, one-line description of the changes to be made. - - -- There's no need to describe *what* to do, just *where* to do it. -- Only reference locations that actually exist (unless you're creating a file). -- If creating a file, assume any subsequent updates are included at the time of creation. -- Don't create and then update a file. Always create new files in shot. -- Prefer updating symbols lower in the syntax tree if possible. -- Never include suggestions on a parent symbol and one of its children in the same suggestions block. -- Never nest an operation with another operation or include CDATA or other content. All suggestions are leaf nodes. -- Descriptions are required for all suggestions except delete. -- When generating multiple suggestions, ensure the descriptions are specific to each individual operation. -- Avoid referring to the location in the description. Focus on the change to be made, not the location where it's made. That's implicit with the symbol you provide. -- Don't generate multiple suggestions at the same location. Instead, combine them together in a single operation with a succinct combined description. -- To add imports respond with a suggestion where the `"symbol"` key is set to `"#imports"` - - - - - - - -```rs src/rectangle.rs -struct Rectangle { - width: f64, - height: f64, -} - -impl Rectangle { - fn new(width: f64, height: f64) -> Self { - Rectangle { width, height } - } -} -``` - -We need to add methods to calculate the area and perimeter of the rectangle. Can you help with that? - - -Sure, I can help with that! 
- -Add new methods 'calculate_area' and 'calculate_perimeter' to the Rectangle struct -Implement the 'Display' trait for the Rectangle struct - - - - -Add new methods 'calculate_area' and 'calculate_perimeter' to the Rectangle struct - - - -{ - "title": "Add Rectangle methods", - "suggestions": [ - { - "kind": "AppendChild", - "path": "src/shapes.rs", - "symbol": "impl Rectangle", - "description": "Add calculate_area method" - }, - { - "kind": "AppendChild", - "path": "src/shapes.rs", - "symbol": "impl Rectangle", - "description": "Add calculate_perimeter method" - } - ] -} - - - -{ - "title": "Add Rectangle methods", - "suggestions": [ - { - "kind": "AppendChild", - "path": "src/shapes.rs", - "symbol": "impl Rectangle", - "description": "Add calculate area and perimeter methods" - } - ] -} - - - -Implement the 'Display' trait for the Rectangle struct - - - -{ - "title": "Implement Display for Rectangle", - "suggestions": [ - { - "kind": "InsertSiblingAfter", - "path": "src/shapes.rs", - "symbol": "impl Rectangle", - "description": "Implement Display trait for Rectangle" - } - ] -} - - - - - -```rs src/user.rs -struct User { - pub name: String, - age: u32, - email: String, -} - -impl User { - fn new(name: String, age: u32, email: String) -> Self { - User { name, age, email } - } - - pub fn print_info(&self) { - println!("Name: {}, Age: {}, Email: {}", self.name, self.age, self.email); - } -} -``` - - -Certainly! -Update the 'print_info' method to use formatted output -Remove the 'email' field from the User struct - - - - -Update the 'print_info' method to use formatted output - - - -{ - "title": "Use formatted output", - "suggestions": [ - { - "kind": "Update", - "path": "src/user.rs", - "symbol": "impl User pub fn print_info", - "description": "Use formatted output" - } - ] -} - - - -Remove the 'email' field from the User struct - - - -{ - "title": "Remove email field", - "suggestions": [ - { - "kind": "Delete", - "path": "src/user.rs", - "symbol": "struct User email" - } - ] -} - - - - - - -```rs src/vehicle.rs -struct Vehicle { - make: String, - model: String, - year: u32, -} - -impl Vehicle { - fn new(make: String, model: String, year: u32) -> Self { - Vehicle { make, model, year } - } - - fn print_year(&self) { - println!("Year: {}", self.year); - } -} -``` - - -Add a 'use std::fmt;' statement at the beginning of the file -Add a new method 'start_engine' in the Vehicle impl block - - - - -Add a 'use std::fmt;' statement at the beginning of the file - - - -{ - "title": "Add use std::fmt statement", - "suggestions": [ - { - "kind": "PrependChild", - "path": "src/vehicle.rs", - "symbol": "#imports", - "description": "Add 'use std::fmt' statement" - } - ] -} - - - -Add a new method 'start_engine' in the Vehicle impl block - - - -{ - "title": "Add start_engine method", - "suggestions": [ - { - "kind": "InsertSiblingAfter", - "path": "src/vehicle.rs", - "symbol": "impl Vehicle fn new", - "description": "Add start_engine method" - } - ] -} - - - - - - -```rs src/employee.rs -struct Employee { - name: String, - position: String, - salary: u32, - department: String, -} - -impl Employee { - fn new(name: String, position: String, salary: u32, department: String) -> Self { - Employee { name, position, salary, department } - } - - fn print_details(&self) { - println!("Name: {}, Position: {}, Salary: {}, Department: {}", - self.name, self.position, self.salary, self.department); - } - - fn give_raise(&mut self, amount: u32) { - self.salary += amount; - } -} -``` - - -Make salary an f32 -Remove the 
'department' field and update the 'print_details' method - - - - -Make salary an f32 - - - -{ - "title": "Change salary to f32", - "suggestions": [ - { - "kind": "Update", - "path": "src/employee.rs", - "symbol": "struct Employee", - "description": "Change the type of salary to an f32" - }, - { - "kind": "Update", - "path": "src/employee.rs", - "symbol": "struct Employee salary", - "description": "Change the type to an f32" - } - ] -} - - - -{ - "title": "Change salary to f32", - "suggestions": [ - { - "kind": "Update", - "path": "src/employee.rs", - "symbol": "struct Employee salary", - "description": "Change the type to an f32" - } - ] -} - - - -Remove the 'department' field and update the 'print_details' method - - - -{ - "title": "Remove department", - "suggestions": [ - { - "kind": "Delete", - "path": "src/employee.rs", - "symbol": "struct Employee department" - }, - { - "kind": "Update", - "path": "src/employee.rs", - "symbol": "impl Employee fn print_details", - "description": "Don't print the 'department' field" - } - ] -} - - - - - - -```rs src/game.rs -struct Player { - name: String, - health: i32, - pub score: u32, -} - -impl Player { - pub fn new(name: String) -> Self { - Player { name, health: 100, score: 0 } - } -} - -struct Game { - players: Vec, -} - -impl Game { - fn new() -> Self { - Game { players: Vec::new() } - } -} -``` - - -Add a 'level' field to Player and update the 'new' method - - - - -Add a 'level' field to Player and update the 'new' method - - - -{ - "title": "Add level field to Player", - "suggestions": [ - { - "kind": "InsertSiblingAfter", - "path": "src/game.rs", - "symbol": "struct Player pub score", - "description": "Add level field to Player" - }, - { - "kind": "Update", - "path": "src/game.rs", - "symbol": "impl Player pub fn new", - "description": "Initialize level in new method" - } - ] -} - - - - - - -```rs src/config.rs -use std::collections::HashMap; - -struct Config { - settings: HashMap, -} - -impl Config { - fn new() -> Self { - Config { settings: HashMap::new() } - } -} -``` - - -Add a 'load_from_file' method to Config and import necessary modules - - - - -Add a 'load_from_file' method to Config and import necessary modules - - - -{ - "title": "Add load_from_file method", - "suggestions": [ - { - "kind": "PrependChild", - "path": "src/config.rs", - "symbol": "#imports", - "description": "Import std::fs and std::io modules" - }, - { - "kind": "AppendChild", - "path": "src/config.rs", - "symbol": "impl Config", - "description": "Add load_from_file method" - } - ] -} - - - - - - -```rs src/database.rs -pub(crate) struct Database { - connection: Connection, -} - -impl Database { - fn new(url: &str) -> Result { - let connection = Connection::connect(url)?; - Ok(Database { connection }) - } - - async fn query(&self, sql: &str) -> Result, Error> { - self.connection.query(sql, &[]) - } -} -``` - - -Add error handling to the 'query' method and create a custom error type - - - - -Add error handling to the 'query' method and create a custom error type - - - -{ - "title": "Add error handling to query", - "suggestions": [ - { - "kind": "PrependChild", - "path": "src/database.rs", - "description": "Import necessary error handling modules" - }, - { - "kind": "InsertSiblingBefore", - "path": "src/database.rs", - "symbol": "pub(crate) struct Database", - "description": "Define custom DatabaseError enum" - }, - { - "kind": "Update", - "path": "src/database.rs", - "symbol": "impl Database async fn query", - "description": "Implement error handling in query 
method" - } - ] -} - - - - -Now generate the suggestions for the following step: - - -{{{workflow_context}}} - - - -{{{step_to_resolve}}} - diff --git a/assets/settings/default.json b/assets/settings/default.json index af498ce2d5c8a0..e104712eab81ee 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -15,9 +15,11 @@ // text editor: // // 1. "VSCode" - // 2. "JetBrains" - // 3. "SublimeText" - // 4. "Atom" + // 2. "Atom" + // 3. "JetBrains" + // 4. "None" + // 5. "SublimeText" + // 6. "TextMate" "base_keymap": "VSCode", // Features that can be globally enabled or disabled "features": { @@ -113,6 +115,18 @@ "use_system_path_prompts": true, // Whether the cursor blinks in the editor. "cursor_blink": true, + // Cursor shape for the default editor. + // 1. A vertical bar + // "bar" + // 2. A block that surrounds the following character + // "block" + // 3. An underline / underscore that runs along the following character + // "underline" + // 4. A box drawn around the following character + // "hollow" + // + // Default: not set, defaults to "bar" + "cursor_shape": null, // How to highlight the current line in the editor. // // 1. Don't highlight the current line: @@ -308,6 +322,10 @@ "show_parameter_hints": true, // Corresponds to null/None LSP hint type value. "show_other_hints": true, + // Whether to show a background for inlay hints. + // + // If set to `true`, the background will use the `hint.background` color from the current theme. + "show_background": false, // Time to wait after editing the buffer, before requesting the hints, // set to 0 to disable debouncing. "edit_debounce_ms": 700, @@ -340,9 +358,19 @@ /// Scrollbar-related settings "scrollbar": { /// When to show the scrollbar in the project panel. + /// This setting can take four values: /// - /// Default: always - "show": "always" + /// 1. null (default): Inherit editor settings + /// 2. Show the scrollbar if there's important information or + /// follow the system's configured behavior (default): + /// "auto" + /// 3. Match the system's configured behavior: + /// "system" + /// 4. Always show the scrollbar: + /// "always" + /// 5. Never show the scrollbar: + /// "never" + "show": null } }, "outline_panel": { @@ -468,7 +496,14 @@ // Position of the close button on the editor tabs. "close_position": "right", // Whether to show the file icon for a tab. - "file_icons": false + "file_icons": false, + // What to do after closing the current tab. + // + // 1. Activate the tab that was open previously (default) + // "History" + // 2. Activate the neighbour tab (prefers the right one, if present) + // "Neighbour" + "activate_on_close": "history" }, // Settings related to preview tabs. "preview_tabs": { @@ -482,6 +517,11 @@ // Whether a preview tab gets replaced when code navigation is used to navigate away from the tab. "enable_preview_from_code_navigation": false }, + // Settings related to the file finder. + "file_finder": { + // Whether to show file icons in the file finder. + "file_icons": true + }, // Whether or not to remove any trailing whitespace from lines of a buffer // before saving it. "remove_trailing_whitespace_on_save": true, @@ -514,17 +554,16 @@ // How to soft-wrap long lines of text. // Possible values: // - // 1. Do not soft wrap. + // 1. Prefer a single line generally, unless an overly long line is encountered. // "soft_wrap": "none", - // 2. Prefer a single line generally, unless an overly long line is encountered. - // "soft_wrap": "prefer_line", - // 3. 
Soft wrap lines that overflow the editor. + // "soft_wrap": "prefer_line", // (deprecated, same as "none") + // 2. Soft wrap lines that overflow the editor. // "soft_wrap": "editor_width", - // 4. Soft wrap lines at the preferred line length. + // 3. Soft wrap lines at the preferred line length. // "soft_wrap": "preferred_line_length", - // 5. Soft wrap lines at the preferred line length or the editor width (whichever is smaller). + // 4. Soft wrap lines at the preferred line length or the editor width (whichever is smaller). // "soft_wrap": "bounded", - "soft_wrap": "prefer_line", + "soft_wrap": "none", // The column at which to soft-wrap lines, for buffers where soft-wrap // is enabled. "preferred_line_length": 80, @@ -579,13 +618,11 @@ } }, // Configuration for how direnv configuration should be loaded. May take 2 values: - // 1. Load direnv configuration through the shell hook, works for POSIX shells and fish. - // "load_direnv": "shell_hook" - // 2. Load direnv configuration using `direnv export json` directly. - // This can help with some shells that otherwise would not detect - // the direnv environment, such as nushell or elvish. + // 1. Load direnv configuration using `direnv export json` directly. // "load_direnv": "direct" - "load_direnv": "shell_hook", + // 2. Load direnv configuration through the shell hook, works for POSIX shells and fish. + // "load_direnv": "shell_hook" + "load_direnv": "direct", "inline_completions": { // A list of globs representing files that inline completions should be disabled for. "disabled_globs": [".env"] @@ -651,6 +688,18 @@ // 3. Always blink the cursor, ignoring the terminal mode // "blinking": "on", "blinking": "terminal_controlled", + // Default cursor shape for the terminal. + // 1. A block that surrounds the following character + // "block" + // 2. A vertical bar + // "bar" + // 3. An underline / underscore that runs along the following character + // "underline" + // 4. A box drawn around the following character + // "hollow" + // + // Default: not set, defaults to "block" + "cursor_shape": null, // Set whether Alternate Scroll mode (code: ?1007) is active by default. // Alternate Scroll mode converts mouse scroll events into up / down key // presses when in the alternate screen (e.g. when running applications @@ -665,10 +714,10 @@ // May take 2 values: // 1. Rely on default platform handling of option key, on macOS // this means generating certain unicode characters - // "option_to_meta": false, + // "option_as_meta": false, // 2. Make the option keys behave as a 'meta' key, e.g. for emacs - // "option_to_meta": true, - "option_as_meta": true, + // "option_as_meta": true, + "option_as_meta": false, // Whether or not selecting text in the terminal will automatically // copy to the system clipboard. "copy_on_select": false, @@ -700,7 +749,7 @@ // to the current working directory. We recommend overriding this // in your project's settings, rather than globally. "directories": [".env", "env", ".venv", "venv"], - // Can also be `csh`, `fish`, and `nushell` + // Can also be `csh`, `fish`, `nushell` and `power_shell` "activate_script": "default" } }, @@ -741,6 +790,7 @@ // } // "file_types": { + "Plain Text": ["txt"], "JSON": ["flake.lock"], "JSONC": [ "**/.zed/**/*.json", @@ -748,8 +798,24 @@ "**/Zed/**/*.json", "tsconfig.json", "pyrightconfig.json" - ] + ], + "TOML": ["uv.lock"] }, + /// By default use a recent system version of node, or install our own. 
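The hunk above also changes the default `load_direnv` mode from `"shell_hook"` to `"direct"`, i.e. Zed now asks `direnv export json` for the environment instead of relying on a shell hook. As a rough illustration only (not Zed's actual loader, and assuming `serde_json` is available), the direct strategy amounts to running that command in the worktree directory and merging the variables it reports; failures from calls like this are what the new environment-error plumbing in `activity_indicator.rs` later in this diff surfaces to the user.

```rs
use std::collections::HashMap;
use std::path::Path;
use std::process::Command;

/// Hypothetical helper: ask direnv for the environment of `dir`.
/// `direnv export json` prints a JSON object mapping variable names to
/// values, where `null` means "unset this variable".
fn direnv_environment(dir: &Path) -> Result<HashMap<String, String>, String> {
    let output = Command::new("direnv")
        .args(["export", "json"])
        .current_dir(dir)
        .output()
        .map_err(|err| format!("failed to spawn direnv: {err}"))?;

    if !output.status.success() {
        return Err(format!("direnv exited with {}", output.status));
    }

    // direnv prints nothing when there is no .envrc to load.
    if output.stdout.is_empty() {
        return Ok(HashMap::new());
    }

    let exported: HashMap<String, Option<String>> =
        serde_json::from_slice(&output.stdout).map_err(|err| err.to_string())?;
    Ok(exported
        .into_iter()
        .filter_map(|(name, value)| value.map(|value| (name, value)))
        .collect())
}
```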
+ /// You can override this to use a version of node that is not in $PATH with: + /// { + /// "node": { + /// "node_path": "/path/to/node" + /// "npm_path": "/path/to/npm" (defaults to node_path/../npm) + /// } + /// } + /// or to ensure Zed always downloads and installs an isolated version of node: + /// { + /// "node": { + /// "ignore_system_version": true, + /// } + /// NOTE: changing this setting currently requires restarting Zed. + "node": {}, // The extensions that Zed should automatically install on startup. // // If you don't want any of these extensions, add this field to your settings @@ -760,6 +826,7 @@ // Different settings for specific languages. "languages": { "Astro": { + "language_servers": ["astro-language-server", "..."], "prettier": { "allowed": true, "plugins": ["prettier-plugin-astro"] @@ -783,6 +850,13 @@ "allowed": true } }, + "Dart": { + "tab_size": 2 + }, + "Diff": { + "remove_trailing_whitespace_on_save": false, + "ensure_final_newline_on_save": false + }, "Elixir": { "language_servers": ["elixir-ls", "!next-ls", "!lexical", "..."] }, @@ -918,7 +992,8 @@ }, "openai": { "version": "1", - "api_url": "https://api.openai.com/v1" + "api_url": "https://api.openai.com/v1", + "low_speed_timeout_in_seconds": 600 } }, // Zed's Prettier integration settings. @@ -1014,7 +1089,7 @@ // environment variables. // // Examples: - // - "proxy": "socks5://localhost:10808" + // - "proxy": "socks5h://localhost:10808" // - "proxy": "http://127.0.0.1:10809" "proxy": null, // Set to configure aliases for the command palette. diff --git a/assets/settings/initial_server_settings.json b/assets/settings/initial_server_settings.json new file mode 100644 index 00000000000000..d6ec33e6012838 --- /dev/null +++ b/assets/settings/initial_server_settings.json @@ -0,0 +1,7 @@ +// Server-specific settings +// +// For a full list of overridable settings, and general information on settings, +// see the documentation: https://zed.dev/docs/configuring-zed#settings-files +{ + "lsp": {} +} diff --git a/assets/settings/initial_user_settings.json b/assets/settings/initial_user_settings.json index d8ac1a00216834..71f3beb1d6076e 100644 --- a/assets/settings/initial_user_settings.json +++ b/assets/settings/initial_user_settings.json @@ -5,7 +5,7 @@ // // To see all of Zed's default settings without changing your // custom settings, run `zed: open default settings` from the -// command palette +// command palette (cmd-shift-p / ctrl-shift-p) { "ui_font_size": 16, "buffer_font_size": 16, diff --git a/clippy.toml b/clippy.toml index 787620d865cc5e..8c8da03a26b63d 100644 --- a/clippy.toml +++ b/clippy.toml @@ -1 +1,2 @@ allow-private-module-inception = true +avoid-breaking-exported-api = false diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 4b6508edb074fd..8020e0665a1af1 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -10,7 +10,7 @@ use gpui::{ use language::{ LanguageRegistry, LanguageServerBinaryStatus, LanguageServerId, LanguageServerName, }; -use project::{LanguageServerProgress, Project}; +use project::{EnvironmentErrorMessage, LanguageServerProgress, Project, WorktreeId}; use smallvec::SmallVec; use std::{cmp::Reverse, fmt::Write, sync::Arc, time::Duration}; use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle}; @@ -19,7 +19,10 @@ use workspace::{item::ItemHandle, StatusItemView, Workspace}; actions!(activity_indicator, 
[ShowErrorMessage]); pub enum Event { - ShowError { lsp_name: Arc, error: String }, + ShowError { + lsp_name: LanguageServerName, + error: String, + }, } pub struct ActivityIndicator { @@ -98,6 +101,7 @@ impl ActivityIndicator { None, cx, ); + buffer.set_capability(language::Capability::ReadOnly, cx); })?; workspace.update(&mut cx, |workspace, cx| { workspace.add_item_to_active_pane( @@ -123,7 +127,7 @@ impl ActivityIndicator { self.statuses.retain(|status| { if let LanguageServerBinaryStatus::Failed { error } = &status.status { cx.emit(Event::ShowError { - lsp_name: status.name.0.clone(), + lsp_name: status.name.clone(), error: error.clone(), }); false @@ -172,7 +176,31 @@ impl ActivityIndicator { .flatten() } + fn pending_environment_errors<'a>( + &'a self, + cx: &'a AppContext, + ) -> impl Iterator { + self.project.read(cx).shell_environment_errors(cx) + } + fn content_to_render(&mut self, cx: &mut ViewContext) -> Option { + // Show if any direnv calls failed + if let Some((&worktree_id, error)) = self.pending_environment_errors(cx).next() { + return Some(Content { + icon: Some( + Icon::new(IconName::Warning) + .size(IconSize::Small) + .into_any_element(), + ), + message: error.0.clone(), + on_click: Some(Arc::new(move |this, cx| { + this.project.update(cx, |project, cx| { + project.remove_environment_error(cx, worktree_id); + }); + cx.dispatch_action(Box::new(workspace::OpenLog)); + })), + }); + } // Show any language server has pending activity. let mut pending_work = self.pending_language_server_work(cx); if let Some(PendingWork { @@ -224,10 +252,10 @@ impl ActivityIndicator { for status in &self.statuses { match status.status { LanguageServerBinaryStatus::CheckingForUpdate => { - checking_for_update.push(status.name.0.as_ref()) + checking_for_update.push(status.name.clone()) } - LanguageServerBinaryStatus::Downloading => downloading.push(status.name.0.as_ref()), - LanguageServerBinaryStatus::Failed { .. } => failed.push(status.name.0.as_ref()), + LanguageServerBinaryStatus::Downloading => downloading.push(status.name.clone()), + LanguageServerBinaryStatus::Failed { .. } => failed.push(status.name.clone()), LanguageServerBinaryStatus::None => {} } } @@ -239,8 +267,24 @@ impl ActivityIndicator { .size(IconSize::Small) .into_any_element(), ), - message: format!("Downloading {}...", downloading.join(", "),), - on_click: None, + message: format!( + "Downloading {}...", + downloading.iter().map(|name| name.0.as_ref()).fold( + String::new(), + |mut acc, s| { + if !acc.is_empty() { + acc.push_str(", "); + } + acc.push_str(s); + acc + } + ) + ), + on_click: Some(Arc::new(move |this, cx| { + this.statuses + .retain(|status| !downloading.contains(&status.name)); + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }); } @@ -253,22 +297,44 @@ impl ActivityIndicator { ), message: format!( "Checking for updates to {}...", - checking_for_update.join(", "), + checking_for_update.iter().map(|name| name.0.as_ref()).fold( + String::new(), + |mut acc, s| { + if !acc.is_empty() { + acc.push_str(", "); + } + acc.push_str(s); + acc + } + ), ), - on_click: None, + on_click: Some(Arc::new(move |this, cx| { + this.statuses + .retain(|status| !checking_for_update.contains(&status.name)); + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }); } if !failed.is_empty() { return Some(Content { icon: Some( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .into_any_element(), ), message: format!( - "Failed to download {}. 
Click to show error.", - failed.join(", "), + "Failed to run {}. Click to show error.", + failed + .iter() + .map(|name| name.0.as_ref()) + .fold(String::new(), |mut acc, s| { + if !acc.is_empty() { + acc.push_str(", "); + } + acc.push_str(s); + acc + }), ), on_click: Some(Arc::new(|this, cx| { this.show_error_message(&Default::default(), cx) @@ -277,10 +343,10 @@ impl ActivityIndicator { } // Show any formatting failure - if let Some(failure) = self.project.read(cx).last_formatting_failure() { + if let Some(failure) = self.project.read(cx).last_formatting_failure(cx) { return Some(Content { icon: Some( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .into_any_element(), ), @@ -301,7 +367,9 @@ impl ActivityIndicator { .into_any_element(), ), message: "Checking for Zed updates…".to_string(), - on_click: None, + on_click: Some(Arc::new(|this, cx| { + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }), AutoUpdateStatus::Downloading => Some(Content { icon: Some( @@ -310,7 +378,9 @@ impl ActivityIndicator { .into_any_element(), ), message: "Downloading Zed update…".to_string(), - on_click: None, + on_click: Some(Arc::new(|this, cx| { + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }), AutoUpdateStatus::Installing => Some(Content { icon: Some( @@ -319,7 +389,9 @@ impl ActivityIndicator { .into_any_element(), ), message: "Installing Zed update…".to_string(), - on_click: None, + on_click: Some(Arc::new(|this, cx| { + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }), AutoUpdateStatus::Updated { binary_path } => Some(Content { icon: None, @@ -333,13 +405,13 @@ impl ActivityIndicator { }), AutoUpdateStatus::Errored => Some(Content { icon: Some( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .into_any_element(), ), message: "Auto update failed".to_string(), on_click: Some(Arc::new(|this, cx| { - this.dismiss_error_message(&Default::default(), cx) + this.dismiss_error_message(&DismissErrorMessage, cx) })), }), AutoUpdateStatus::Idle => None, @@ -357,7 +429,9 @@ impl ActivityIndicator { .into_any_element(), ), message: format!("Updating {extension_id} extension…"), - on_click: None, + on_click: Some(Arc::new(|this, cx| { + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }); } } diff --git a/crates/anthropic/Cargo.toml b/crates/anthropic/Cargo.toml index 9e48ad0e57d81d..32ac0fd9884034 100644 --- a/crates/anthropic/Cargo.toml +++ b/crates/anthropic/Cargo.toml @@ -20,13 +20,9 @@ anyhow.workspace = true chrono.workspace = true futures.workspace = true http_client.workspace = true -isahc.workspace = true schemars = { workspace = true, optional = true } serde.workspace = true serde_json.workspace = true strum.workspace = true thiserror.workspace = true util.workspace = true - -[dev-dependencies] -tokio.workspace = true diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs index f960dc541a2866..08c8f27bd90276 100644 --- a/crates/anthropic/src/anthropic.rs +++ b/crates/anthropic/src/anthropic.rs @@ -6,9 +6,8 @@ use std::{pin::Pin, str::FromStr}; use anyhow::{anyhow, Context, Result}; use chrono::{DateTime, Utc}; use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt}; -use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; -use isahc::config::Configurable; -use isahc::http::{HeaderMap, HeaderValue}; +use http_client::http::{HeaderMap, HeaderValue}; +use 
http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest}; use serde::{Deserialize, Serialize}; use strum::{EnumIter, EnumString}; use thiserror::Error; @@ -49,6 +48,7 @@ pub enum Model { /// Indicates whether this custom model supports caching. cache_configuration: Option, max_output_tokens: Option, + default_temperature: Option, }, } @@ -124,6 +124,19 @@ impl Model { } } + pub fn default_temperature(&self) -> f32 { + match self { + Self::Claude3_5Sonnet + | Self::Claude3Opus + | Self::Claude3Sonnet + | Self::Claude3Haiku => 1.0, + Self::Custom { + default_temperature, + .. + } => default_temperature.unwrap_or(1.0), + } + } + pub fn tool_model_id(&self) -> &str { if let Self::Custom { tool_override: Some(tool_override), @@ -275,7 +288,7 @@ pub async fn stream_completion_with_rate_limit_info( .header("X-Api-Key", api_key) .header("Content-Type", "application/json"); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); } let serialized_request = serde_json::to_string(&request).context("failed to serialize request")?; @@ -508,6 +521,10 @@ pub struct Usage { pub input_tokens: Option, #[serde(default, skip_serializing_if = "Option::is_none")] pub output_tokens: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub cache_creation_input_tokens: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub cache_read_input_tokens: Option, } #[derive(Debug, Serialize, Deserialize)] diff --git a/crates/assets/src/assets.rs b/crates/assets/src/assets.rs index 395cbf62f6ce5e..ee990085f6de17 100644 --- a/crates/assets/src/assets.rs +++ b/crates/assets/src/assets.rs @@ -8,6 +8,7 @@ use rust_embed::RustEmbed; #[folder = "../../assets"] #[include = "fonts/**/*"] #[include = "icons/**/*"] +#[include = "images/**/*"] #[include = "themes/**/*"] #[exclude = "themes/src/*"] #[include = "sounds/**/*"] diff --git a/crates/assistant/Cargo.toml b/crates/assistant/Cargo.toml index d2b5aed9bd8bba..21153b6fcc39d6 100644 --- a/crates/assistant/Cargo.toml +++ b/crates/assistant/Cargo.toml @@ -51,6 +51,7 @@ indoc.workspace = true language.workspace = true language_model.workspace = true log.workspace = true +lsp.workspace = true markdown.workspace = true menu.workspace = true multi_buffer.workspace = true @@ -65,6 +66,7 @@ proto.workspace = true regex.workspace = true release_channel.workspace = true rope.workspace = true +rpc.workspace = true schemars.workspace = true search.workspace = true semantic_index.workspace = true @@ -93,9 +95,12 @@ editor = { workspace = true, features = ["test-support"] } env_logger.workspace = true language = { workspace = true, features = ["test-support"] } language_model = { workspace = true, features = ["test-support"] } +languages = { workspace = true, features = ["test-support"] } log.workspace = true +pretty_assertions.workspace = true project = { workspace = true, features = ["test-support"] } rand.workspace = true serde_json_lenient.workspace = true text = { workspace = true, features = ["test-support"] } +tree-sitter-md.workspace = true unindent.workspace = true diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index 70e37ba239eed6..e1e574744fff61 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -6,6 +6,7 @@ mod context; pub mod context_store; mod inline_assistant; mod model_selector; +mod patch; mod 
prompt_library; mod prompts; mod slash_command; @@ -14,7 +15,6 @@ pub mod slash_command_settings; mod streaming_diff; mod terminal_inline_assistant; mod tools; -mod workflow; pub use assistant_panel::{AssistantPanel, AssistantPanelEvent}; use assistant_settings::AssistantSettings; @@ -35,21 +35,23 @@ use language_model::{ LanguageModelId, LanguageModelProviderId, LanguageModelRegistry, LanguageModelResponseMessage, }; pub(crate) use model_selector::*; +pub use patch::*; pub use prompts::PromptBuilder; use prompts::PromptLoadingParams; -use semantic_index::{CloudEmbeddingProvider, SemanticIndex}; +use semantic_index::{CloudEmbeddingProvider, SemanticDb}; use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings, SettingsStore}; +use slash_command::workflow_command::WorkflowSlashCommand; use slash_command::{ - context_server_command, default_command, diagnostics_command, docs_command, fetch_command, - file_command, now_command, project_command, prompt_command, search_command, symbols_command, - tab_command, terminal_command, workflow_command, + auto_command, cargo_workspace_command, context_server_command, default_command, delta_command, + diagnostics_command, docs_command, fetch_command, file_command, now_command, project_command, + prompt_command, search_command, symbols_command, tab_command, terminal_command, + workflow_command, }; use std::path::PathBuf; use std::sync::Arc; pub(crate) use streaming_diff::*; use util::ResultExt; -pub use workflow::*; use crate::slash_command_settings::SlashCommandSettings; @@ -58,6 +60,7 @@ actions!( [ Assist, Split, + CopyCode, CycleMessageRole, QuoteSelection, InsertIntoEditor, @@ -68,6 +71,8 @@ actions!( ConfirmCommand, NewContext, ToggleModelSelector, + CycleNextInlineAssist, + CyclePreviousInlineAssist ] ); @@ -210,12 +215,13 @@ pub fn init( let client = client.clone(); async move { let embedding_provider = CloudEmbeddingProvider::new(client.clone()); - let semantic_index = SemanticIndex::new( + let semantic_index = SemanticDb::new( paths::embeddings_dir().join("semantic-index-db.0.mdb"), Arc::new(embedding_provider), &mut cx, ) .await?; + cx.update(|cx| cx.set_global(semantic_index)) } }) @@ -357,30 +363,80 @@ fn update_active_language_model_from_settings(cx: &mut AppContext) { let settings = AssistantSettings::get_global(cx); let provider_name = LanguageModelProviderId::from(settings.default_model.provider.clone()); let model_id = LanguageModelId::from(settings.default_model.model.clone()); + let inline_alternatives = settings + .inline_alternatives + .iter() + .map(|alternative| { + ( + LanguageModelProviderId::from(alternative.provider.clone()), + LanguageModelId::from(alternative.model.clone()), + ) + }) + .collect::>(); LanguageModelRegistry::global(cx).update(cx, |registry, cx| { registry.select_active_model(&provider_name, &model_id, cx); + registry.select_inline_alternative_models(inline_alternatives, cx); }); } fn register_slash_commands(prompt_builder: Option>, cx: &mut AppContext) { let slash_command_registry = SlashCommandRegistry::global(cx); + slash_command_registry.register_command(file_command::FileSlashCommand, true); + slash_command_registry.register_command(delta_command::DeltaSlashCommand, true); slash_command_registry.register_command(symbols_command::OutlineSlashCommand, true); slash_command_registry.register_command(tab_command::TabSlashCommand, true); - slash_command_registry.register_command(project_command::ProjectSlashCommand, true); + slash_command_registry + 
.register_command(cargo_workspace_command::CargoWorkspaceSlashCommand, true); slash_command_registry.register_command(prompt_command::PromptSlashCommand, true); slash_command_registry.register_command(default_command::DefaultSlashCommand, false); slash_command_registry.register_command(terminal_command::TerminalSlashCommand, true); slash_command_registry.register_command(now_command::NowSlashCommand, false); slash_command_registry.register_command(diagnostics_command::DiagnosticsSlashCommand, true); + slash_command_registry.register_command(fetch_command::FetchSlashCommand, false); + slash_command_registry.register_command(fetch_command::FetchSlashCommand, false); if let Some(prompt_builder) = prompt_builder { - slash_command_registry.register_command( - workflow_command::WorkflowSlashCommand::new(prompt_builder.clone()), - true, - ); + cx.observe_global::({ + let slash_command_registry = slash_command_registry.clone(); + let prompt_builder = prompt_builder.clone(); + move |cx| { + if AssistantSettings::get_global(cx).are_live_diffs_enabled(cx) { + slash_command_registry.register_command( + workflow_command::WorkflowSlashCommand::new(prompt_builder.clone()), + true, + ); + } else { + slash_command_registry.unregister_command_by_name(WorkflowSlashCommand::NAME); + } + } + }) + .detach(); + + cx.observe_flag::({ + let slash_command_registry = slash_command_registry.clone(); + move |is_enabled, _cx| { + if is_enabled { + slash_command_registry.register_command( + project_command::ProjectSlashCommand::new(prompt_builder.clone()), + true, + ); + } + } + }) + .detach(); } - slash_command_registry.register_command(fetch_command::FetchSlashCommand, false); + + cx.observe_flag::({ + let slash_command_registry = slash_command_registry.clone(); + move |is_enabled, _cx| { + if is_enabled { + // [#auto-staff-ship] TODO remove this when /auto is no longer staff-shipped + slash_command_registry.register_command(auto_command::AutoCommand, true); + } + } + }) + .detach(); update_slash_commands_from_settings(cx); cx.observe_global::(update_slash_commands_from_settings) @@ -407,10 +463,12 @@ fn update_slash_commands_from_settings(cx: &mut AppContext) { slash_command_registry.unregister_command(docs_command::DocsSlashCommand); } - if settings.project.enabled { - slash_command_registry.register_command(project_command::ProjectSlashCommand, true); + if settings.cargo_workspace.enabled { + slash_command_registry + .register_command(cargo_workspace_command::CargoWorkspaceSlashCommand, true); } else { - slash_command_registry.unregister_command(project_command::ProjectSlashCommand); + slash_command_registry + .unregister_command(cargo_workspace_command::CargoWorkspaceSlashCommand); } } diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index fd5f62e1881f99..91a8eb77285aa8 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -11,76 +11,82 @@ use crate::{ }, slash_command_picker, terminal_inline_assistant::TerminalInlineAssistant, - Assist, CacheStatus, ConfirmCommand, Context, ContextEvent, ContextId, ContextStore, - ContextStoreEvent, CycleMessageRole, DeployHistory, DeployPromptLibrary, InlineAssistId, - InlineAssistant, InsertDraggedFiles, InsertIntoEditor, Message, MessageId, MessageMetadata, - MessageStatus, ModelPickerDelegate, ModelSelector, NewContext, PendingSlashCommand, - PendingSlashCommandStatus, QuoteSelection, RemoteContextMetadata, SavedContextMetadata, Split, - ToggleFocus, ToggleModelSelector, 
WorkflowStepResolution, + Assist, AssistantPatch, AssistantPatchStatus, CacheStatus, ConfirmCommand, Content, Context, + ContextEvent, ContextId, ContextStore, ContextStoreEvent, CopyCode, CycleMessageRole, + DeployHistory, DeployPromptLibrary, InlineAssistant, InsertDraggedFiles, InsertIntoEditor, + Message, MessageId, MessageMetadata, MessageStatus, ModelPickerDelegate, ModelSelector, + NewContext, PendingSlashCommand, PendingSlashCommandStatus, QuoteSelection, + RemoteContextMetadata, SavedContextMetadata, Split, ToggleFocus, ToggleModelSelector, }; -use anyhow::{anyhow, Result}; +use anyhow::Result; use assistant_slash_command::{SlashCommand, SlashCommandOutputSection}; use assistant_tool::ToolRegistry; -use client::{proto, Client, Status}; +use client::{proto, zed_urls, Client, Status}; use collections::{BTreeSet, HashMap, HashSet}; use editor::{ actions::{FoldAt, MoveToEndOfLine, Newline, ShowCompletions, UnfoldAt}, display_map::{ - BlockDisposition, BlockId, BlockProperties, BlockStyle, Crease, CustomBlockId, FoldId, - RenderBlock, ToDisplayPoint, + BlockContext, BlockDisposition, BlockId, BlockProperties, BlockStyle, Crease, + CreaseMetadata, CustomBlockId, FoldId, RenderBlock, ToDisplayPoint, }, - scroll::{Autoscroll, AutoscrollStrategy, ScrollAnchor}, - Anchor, Editor, EditorEvent, ExcerptRange, MultiBuffer, RowExt, ToOffset as _, ToPoint, + scroll::{Autoscroll, AutoscrollStrategy}, + Anchor, Editor, EditorEvent, ProposedChangeLocation, ProposedChangesEditor, RowExt, + ToOffset as _, ToPoint, }; use editor::{display_map::CreaseId, FoldPlaceholder}; use fs::Fs; +use futures::FutureExt; use gpui::{ canvas, div, img, percentage, point, pulsating_between, size, Action, Animation, AnimationExt, AnyElement, AnyView, AppContext, AsyncWindowContext, ClipboardEntry, ClipboardItem, - Context as _, Empty, Entity, EntityId, EventEmitter, ExternalPaths, FocusHandle, FocusableView, - FontWeight, InteractiveElement, IntoElement, Model, ParentElement, Pixels, ReadGlobal, Render, - RenderImage, SharedString, Size, StatefulInteractiveElement, Styled, Subscription, Task, - Transformation, UpdateGlobal, View, VisualContext, WeakView, WindowContext, + CursorStyle, Empty, Entity, EventEmitter, ExternalPaths, FocusHandle, FocusableView, + FontWeight, InteractiveElement, IntoElement, Model, ParentElement, Pixels, Render, RenderImage, + SharedString, Size, StatefulInteractiveElement, Styled, Subscription, Task, Transformation, + UpdateGlobal, View, VisualContext, WeakView, WindowContext, }; use indexed_docs::IndexedDocsStore; use language::{ - language_settings::SoftWrap, Capability, LanguageRegistry, LspAdapterDelegate, Point, ToOffset, + language_settings::SoftWrap, BufferSnapshot, LanguageRegistry, LspAdapterDelegate, ToOffset, }; use language_model::{ provider::cloud::PROVIDER_ID, LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry, Role, }; +use language_model::{LanguageModelImage, LanguageModelToolUse}; use multi_buffer::MultiBufferRow; use picker::{Picker, PickerDelegate}; -use project::{Project, ProjectLspAdapterDelegate, Worktree}; +use project::lsp_store::LocalLspAdapterDelegate; +use project::{Project, Worktree}; +use rope::Point; use search::{buffer_search::DivRegistrar, BufferSearchBar}; +use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings}; use smol::stream::StreamExt; use std::{ borrow::Cow, cmp, - collections::hash_map, - fmt::Write, ops::{ControlFlow, Range}, path::PathBuf, sync::Arc, time::Duration, }; use 
terminal_view::{terminal_panel::TerminalPanel, TerminalView}; +use text::SelectionGoal; use ui::TintColor; use ui::{ prelude::*, utils::{format_distance_from_now, DateTimeType}, - Avatar, AvatarShape, ButtonLike, ContextMenu, Disclosure, ElevationIndex, KeyBinding, ListItem, + Avatar, ButtonLike, ContextMenu, Disclosure, ElevationIndex, KeyBinding, ListItem, ListItemSpacing, PopoverMenu, PopoverMenuHandle, Tooltip, }; use util::{maybe, ResultExt}; use workspace::{ dock::{DockPosition, Panel, PanelEvent}, item::{self, FollowableItem, Item, ItemHandle}, + notifications::NotificationId, pane::{self, SaveIntent}, searchable::{SearchEvent, SearchableItem}, - DraggedSelection, Pane, Save, ShowConfiguration, ToggleZoom, ToolbarItemEvent, + DraggedSelection, Pane, Save, ShowConfiguration, Toast, ToggleZoom, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, }; use workspace::{searchable::SearchableItemHandle, DraggedTab}; @@ -102,6 +108,7 @@ pub fn init(cx: &mut AppContext) { .register_action(AssistantPanel::inline_assist) .register_action(ContextEditor::quote_selection) .register_action(ContextEditor::insert_selection) + .register_action(ContextEditor::copy_code) .register_action(ContextEditor::insert_dragged_files) .register_action(AssistantPanel::show_configuration) .register_action(AssistantPanel::create_new_context); @@ -254,9 +261,7 @@ impl PickerDelegate for SavedContextPickerDelegate { .gap_2() .children(if let Some(host_user) = host_user { vec![ - Avatar::new(host_user.avatar_uri.clone()) - .shape(AvatarShape::Circle) - .into_any_element(), + Avatar::new(host_user.avatar_uri.clone()).into_any_element(), Label::new(format!("Shared by @{}", host_user.github_login)) .color(Color::Muted) .size(LabelSize::Small) @@ -691,7 +696,9 @@ impl AssistantPanel { log::error!("no context found with ID: {}", context_id.to_proto()); return; }; - let lsp_adapter_delegate = make_lsp_adapter_delegate(&self.project, cx).log_err(); + let lsp_adapter_delegate = make_lsp_adapter_delegate(&self.project, cx) + .log_err() + .flatten(); let assistant_panel = cx.view().downgrade(); let editor = cx.new_view(|cx| { @@ -939,14 +946,22 @@ impl AssistantPanel { cx: &mut ViewContext, ) { if let Some(panel) = workspace.panel::(cx) { - panel.update(cx, |panel, cx| { - panel.new_context(cx); - }); + let did_create_context = panel + .update(cx, |panel, cx| { + panel.new_context(cx)?; + + Some(()) + }) + .is_some(); + if did_create_context { + ContextEditor::quote_selection(workspace, &Default::default(), cx); + } } } fn new_context(&mut self, cx: &mut ViewContext) -> Option> { - if self.project.read(cx).is_via_collab() { + let project = self.project.read(cx); + if project.is_via_collab() && project.dev_server_project_id().is_none() { let task = self .context_store .update(cx, |store, cx| store.create_remote_context(cx)); @@ -957,7 +972,8 @@ impl AssistantPanel { this.update(&mut cx, |this, cx| { let workspace = this.workspace.clone(); let project = this.project.clone(); - let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err(); + let lsp_adapter_delegate = + make_lsp_adapter_delegate(&project, cx).log_err().flatten(); let fs = this.fs.clone(); let project = this.project.clone(); @@ -987,7 +1003,9 @@ impl AssistantPanel { None } else { let context = self.context_store.update(cx, |store, cx| store.create(cx)); - let lsp_adapter_delegate = make_lsp_adapter_delegate(&self.project, cx).log_err(); + let lsp_adapter_delegate = make_lsp_adapter_delegate(&self.project, cx) + .log_err() + .flatten(); 
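One recurring pattern in the `assistant_panel.rs` hunks above: every caller of `make_lsp_adapter_delegate` now appends `.flatten()` after `.log_err()`, which only type-checks if the function has started returning a `Result<Option<_>>` rather than a `Result<_>` (for example when there is no local worktree to build a delegate from). A contrived, dependency-free sketch of why both adapters are needed (the names below are stand-ins, not Zed's API):

```rs
/// Stand-in for util::ResultExt::log_err: report the error and turn
/// Result<T, E> into Option<T>.
fn log_err<T, E: std::fmt::Display>(result: Result<T, E>) -> Option<T> {
    result.map_err(|err| eprintln!("error: {err}")).ok()
}

struct Delegate;

/// Pretend constructor that can legitimately find nothing, mirroring the
/// Result<Option<_>> shape implied by the new call sites.
fn make_delegate(has_local_worktree: bool) -> Result<Option<Delegate>, &'static str> {
    Ok(has_local_worktree.then_some(Delegate))
}

fn main() {
    // Result<Option<Delegate>> --log_err--> Option<Option<Delegate>>
    //                          --flatten--> Option<Delegate>
    let delegate: Option<Delegate> = log_err(make_delegate(false)).flatten();
    assert!(delegate.is_none());
}
```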
let assistant_panel = cx.view().downgrade(); let editor = cx.new_view(|cx| { @@ -1193,7 +1211,7 @@ impl AssistantPanel { let project = self.project.clone(); let workspace = self.workspace.clone(); - let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err(); + let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err().flatten(); cx.spawn(|this, mut cx| async move { let context = context.await?; @@ -1240,7 +1258,9 @@ impl AssistantPanel { .update(cx, |store, cx| store.open_remote_context(id, cx)); let fs = self.fs.clone(); let workspace = self.workspace.clone(); - let lsp_adapter_delegate = make_lsp_adapter_delegate(&self.project, cx).log_err(); + let lsp_adapter_delegate = make_lsp_adapter_delegate(&self.project, cx) + .log_err() + .flatten(); cx.spawn(|this, mut cx| async move { let context = context.await?; @@ -1423,65 +1443,27 @@ struct ScrollPosition { cursor: Anchor, } -struct WorkflowStepViewState { - header_block_id: CustomBlockId, - header_crease_id: CreaseId, - footer_block_id: Option, - footer_crease_id: Option, - assist: Option, - resolution: Option>>, -} - -impl WorkflowStepViewState { - fn status(&self, cx: &AppContext) -> WorkflowStepStatus { - if let Some(assist) = &self.assist { - match assist.status(cx) { - WorkflowAssistStatus::Idle => WorkflowStepStatus::Idle, - WorkflowAssistStatus::Pending => WorkflowStepStatus::Pending, - WorkflowAssistStatus::Done => WorkflowStepStatus::Done, - WorkflowAssistStatus::Confirmed => WorkflowStepStatus::Confirmed, - } - } else if let Some(resolution) = self.resolution.as_deref() { - match resolution { - Err(err) => WorkflowStepStatus::Error(err), - Ok(_) => WorkflowStepStatus::Idle, - } - } else { - WorkflowStepStatus::Resolving - } - } -} - -#[derive(Clone, Copy)] -enum WorkflowStepStatus<'a> { - Resolving, - Error(&'a anyhow::Error), - Idle, - Pending, - Done, - Confirmed, +struct PatchViewState { + footer_block_id: CustomBlockId, + crease_id: CreaseId, + editor: Option, + update_task: Option>, } -impl<'a> WorkflowStepStatus<'a> { - pub(crate) fn is_confirmed(&self) -> bool { - matches!(self, Self::Confirmed) - } +struct PatchEditorState { + editor: WeakView, + opened_patch: AssistantPatch, } -#[derive(Debug, Eq, PartialEq)] -struct ActiveWorkflowStep { - range: Range, - resolved: bool, -} +type MessageHeader = MessageMetadata; -struct WorkflowAssist { - editor: WeakView, - editor_was_open: bool, - assist_ids: Vec, +#[derive(Clone)] +enum AssistError { + PaymentRequired, + MaxMonthlySpendReached, + Message(SharedString), } -type MessageHeader = MessageMetadata; - pub struct ContextEditor { context: Model, fs: Arc, @@ -1497,10 +1479,10 @@ pub struct ContextEditor { pending_slash_command_blocks: HashMap, CustomBlockId>, pending_tool_use_creases: HashMap, CreaseId>, _subscriptions: Vec, - workflow_steps: HashMap, WorkflowStepViewState>, - active_workflow_step: Option, + patches: HashMap, PatchViewState>, + active_patch: Option>, assistant_panel: WeakView, - error_message: Option, + last_error: Option, show_accept_terms: bool, pub(crate) slash_menu_handle: PopoverMenuHandle>, @@ -1539,7 +1521,7 @@ impl ContextEditor { editor.set_show_runnables(false, cx); editor.set_show_wrap_guides(false, cx); editor.set_show_indent_guides(false, cx); - editor.set_completion_provider(Box::new(completion_provider)); + editor.set_completion_provider(Some(Box::new(completion_provider))); editor.set_collaboration_hub(Box::new(project.clone())); editor }); @@ -1552,7 +1534,7 @@ impl ContextEditor { ]; let sections = 
context.read(cx).slash_command_output_sections().to_vec(); - let edit_step_ranges = context.read(cx).workflow_step_ranges().collect::>(); + let patch_ranges = context.read(cx).patch_ranges().collect::>(); let mut this = Self { context, editor, @@ -1568,10 +1550,10 @@ impl ContextEditor { pending_slash_command_blocks: HashMap::default(), pending_tool_use_creases: HashMap::default(), _subscriptions, - workflow_steps: HashMap::default(), - active_workflow_step: None, + patches: HashMap::default(), + active_patch: None, assistant_panel, - error_message: None, + last_error: None, show_accept_terms: false, slash_menu_handle: Default::default(), dragged_file_worktrees: Vec::new(), @@ -1579,7 +1561,7 @@ impl ContextEditor { this.update_message_headers(cx); this.update_image_blocks(cx); this.insert_slash_command_output_sections(sections, false, cx); - this.workflow_steps_updated(&Vec::new(), &edit_step_ranges, cx); + this.patches_updated(&Vec::new(), &patch_ranges, cx); this } @@ -1614,134 +1596,28 @@ impl ContextEditor { return; } - if !self.apply_active_workflow_step(cx) { - self.error_message = None; - self.send_to_model(cx); - cx.notify(); - } - } - - fn apply_workflow_step(&mut self, range: Range, cx: &mut ViewContext) { - self.show_workflow_step(range.clone(), cx); - - if let Some(workflow_step) = self.workflow_steps.get(&range) { - if let Some(assist) = workflow_step.assist.as_ref() { - let assist_ids = assist.assist_ids.clone(); - cx.spawn(|this, mut cx| async move { - for assist_id in assist_ids { - let mut receiver = this.update(&mut cx, |_, cx| { - cx.window_context().defer(move |cx| { - InlineAssistant::update_global(cx, |assistant, cx| { - assistant.start_assist(assist_id, cx); - }) - }); - InlineAssistant::update_global(cx, |assistant, _| { - assistant.observe_assist(assist_id) - }) - })?; - while !receiver.borrow().is_done() { - let _ = receiver.changed().await; - } - } - anyhow::Ok(()) - }) - .detach_and_log_err(cx); - } - } - } - - fn apply_active_workflow_step(&mut self, cx: &mut ViewContext) -> bool { - let Some((range, step)) = self.active_workflow_step() else { - return false; - }; - - if let Some(assist) = step.assist.as_ref() { - match assist.status(cx) { - WorkflowAssistStatus::Pending => {} - WorkflowAssistStatus::Confirmed => return false, - WorkflowAssistStatus::Done => self.confirm_workflow_step(range, cx), - WorkflowAssistStatus::Idle => self.apply_workflow_step(range, cx), - } - } else { - match step.resolution.as_deref() { - Some(Ok(_)) => self.apply_workflow_step(range, cx), - Some(Err(_)) => self.resolve_workflow_step(range, cx), - None => {} - } - } - - true - } - - fn resolve_workflow_step( - &mut self, - range: Range, - cx: &mut ViewContext, - ) { - self.context - .update(cx, |context, cx| context.resolve_workflow_step(range, cx)); - } - - fn stop_workflow_step(&mut self, range: Range, cx: &mut ViewContext) { - if let Some(workflow_step) = self.workflow_steps.get(&range) { - if let Some(assist) = workflow_step.assist.as_ref() { - let assist_ids = assist.assist_ids.clone(); - cx.window_context().defer(|cx| { - InlineAssistant::update_global(cx, |assistant, cx| { - for assist_id in assist_ids { - assistant.stop_assist(assist_id, cx); - } - }) - }); - } + if self.focus_active_patch(cx) { + return; } - } - fn undo_workflow_step(&mut self, range: Range, cx: &mut ViewContext) { - if let Some(workflow_step) = self.workflow_steps.get_mut(&range) { - if let Some(assist) = workflow_step.assist.take() { - cx.window_context().defer(|cx| { - InlineAssistant::update_global(cx, 
|assistant, cx| { - for assist_id in assist.assist_ids { - assistant.undo_assist(assist_id, cx); - } - }) - }); - } - } + self.last_error = None; + self.send_to_model(cx); + cx.notify(); } - fn confirm_workflow_step( - &mut self, - range: Range, - cx: &mut ViewContext, - ) { - if let Some(workflow_step) = self.workflow_steps.get(&range) { - if let Some(assist) = workflow_step.assist.as_ref() { - let assist_ids = assist.assist_ids.clone(); - cx.window_context().defer(move |cx| { - InlineAssistant::update_global(cx, |assistant, cx| { - for assist_id in assist_ids { - assistant.finish_assist(assist_id, false, cx); - } - }) - }); + fn focus_active_patch(&mut self, cx: &mut ViewContext) -> bool { + if let Some((_range, patch)) = self.active_patch() { + if let Some(editor) = patch + .editor + .as_ref() + .and_then(|state| state.editor.upgrade()) + { + cx.focus_view(&editor); + return true; } } - } - fn reject_workflow_step(&mut self, range: Range, cx: &mut ViewContext) { - if let Some(workflow_step) = self.workflow_steps.get_mut(&range) { - if let Some(assist) = workflow_step.assist.take() { - cx.window_context().defer(move |cx| { - InlineAssistant::update_global(cx, |assistant, cx| { - for assist_id in assist.assist_ids { - assistant.finish_assist(assist_id, true, cx); - } - }) - }); - } - } + false } fn send_to_model(&mut self, cx: &mut ViewContext) { @@ -1765,7 +1641,7 @@ impl ContextEditor { } fn cancel(&mut self, _: &editor::actions::Cancel, cx: &mut ViewContext) { - self.error_message = None; + self.last_error = None; if self .context @@ -1774,19 +1650,6 @@ impl ContextEditor { return; } - if let Some((range, active_step)) = self.active_workflow_step() { - match active_step.status(cx) { - WorkflowStepStatus::Pending => { - self.stop_workflow_step(range, cx); - return; - } - WorkflowStepStatus::Done => { - self.reject_workflow_step(range, cx); - return; - } - _ => {} - } - } cx.propagate(); } @@ -1896,7 +1759,22 @@ impl ContextEditor { cx: &mut ViewContext, ) { if let Some(command) = SlashCommandRegistry::global(cx).command(name) { - let output = command.run(arguments, workspace, self.lsp_adapter_delegate.clone(), cx); + let context = self.context.read(cx); + let sections = context + .slash_command_output_sections() + .into_iter() + .filter(|section| section.is_valid(context.buffer().read(cx))) + .cloned() + .collect::>(); + let snapshot = context.buffer().read(cx).snapshot(); + let output = command.run( + arguments, + §ions, + snapshot, + workspace, + self.lsp_adapter_delegate.clone(), + cx, + ); self.context.update(cx, |context, cx| { context.insert_command_output( command_range, @@ -1988,6 +1866,20 @@ impl ContextEditor { let buffer_row = MultiBufferRow(start.to_point(&buffer).row); buffer_rows_to_fold.insert(buffer_row); + self.context.update(cx, |context, cx| { + context.insert_content( + Content::ToolUse { + range: tool_use.source_range.clone(), + tool_use: LanguageModelToolUse { + id: tool_use.id.to_string(), + name: tool_use.name.clone(), + input: tool_use.input.clone(), + }, + }, + cx, + ); + }); + Crease::new( start..end, placeholder, @@ -2011,8 +1903,8 @@ impl ContextEditor { ); }); } - ContextEvent::WorkflowStepsUpdated { removed, updated } => { - self.workflow_steps_updated(removed, updated, cx); + ContextEvent::PatchesUpdated { removed, updated } => { + self.patches_updated(removed, updated, cx); } ContextEvent::PendingSlashCommandsUpdated { removed, updated } => { self.editor.update(cx, |editor, cx| { @@ -2241,12 +2133,18 @@ impl ContextEditor { } ContextEvent::Operation(_) 
=> {} ContextEvent::ShowAssistError(error_message) => { - self.error_message = Some(error_message.clone()); + self.last_error = Some(AssistError::Message(error_message.clone())); + } + ContextEvent::ShowPaymentRequiredError => { + self.last_error = Some(AssistError::PaymentRequired); + } + ContextEvent::ShowMaxMonthlySpendReachedError => { + self.last_error = Some(AssistError::MaxMonthlySpendReached); } } } - fn workflow_steps_updated( + fn patches_updated( &mut self, removed: &Vec>, updated: &Vec>, @@ -2257,218 +2155,133 @@ impl ContextEditor { let mut removed_block_ids = HashSet::default(); let mut editors_to_close = Vec::new(); for range in removed { - if let Some(state) = self.workflow_steps.remove(range) { - editors_to_close.extend(self.hide_workflow_step(range.clone(), cx)); - removed_block_ids.insert(state.header_block_id); - removed_crease_ids.push(state.header_crease_id); - removed_block_ids.extend(state.footer_block_id); - removed_crease_ids.extend(state.footer_crease_id); + if let Some(state) = self.patches.remove(range) { + editors_to_close.extend(state.editor.and_then(|state| state.editor.upgrade())); + removed_block_ids.insert(state.footer_block_id); + removed_crease_ids.push(state.crease_id); } } - for range in updated { - editors_to_close.extend(self.hide_workflow_step(range.clone(), cx)); - } - self.editor.update(cx, |editor, cx| { let snapshot = editor.snapshot(cx); let multibuffer = &snapshot.buffer_snapshot; - let (&excerpt_id, _, buffer) = multibuffer.as_singleton().unwrap(); + let (&excerpt_id, _, _) = multibuffer.as_singleton().unwrap(); + let mut replaced_blocks = HashMap::default(); for range in updated { - let Some(step) = self.context.read(cx).workflow_step_for_range(&range, cx) else { + let Some(patch) = self.context.read(cx).patch_for_range(&range, cx).cloned() else { continue; }; - let resolution = step.resolution.clone(); - let header_start = step.range.start; - let header_end = if buffer.contains_str_at(step.leading_tags_end, "\n") { - buffer.anchor_before(step.leading_tags_end.to_offset(&buffer) + 1) - } else { - step.leading_tags_end - }; - let header_range = multibuffer - .anchor_in_excerpt(excerpt_id, header_start) - .unwrap() - ..multibuffer - .anchor_in_excerpt(excerpt_id, header_end) - .unwrap(); - let footer_range = step.trailing_tag_start.map(|start| { - let mut step_range_end = step.range.end.to_offset(&buffer); - if buffer.contains_str_at(step_range_end, "\n") { - // Only include the newline if it belongs to the same message. 
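Elsewhere in this hunk the editor's `error_message` field becomes `last_error`, now holding an `AssistError` with dedicated variants for the payment-required and max-monthly-spend cases in addition to plain messages. The rendering side is not part of this excerpt; purely as an illustration of how such a variant might fan out into user-facing text (the strings below are invented, not Zed's copy):

```rs
/// Toy copy of the enum introduced above, with String in place of SharedString.
enum AssistError {
    PaymentRequired,
    MaxMonthlySpendReached,
    Message(String),
}

/// Illustrative only: map each error to a one-line summary for a banner.
fn status_line(error: &AssistError) -> String {
    match error {
        AssistError::PaymentRequired => "Payment required to keep using the assistant.".to_string(),
        AssistError::MaxMonthlySpendReached => "Monthly spending limit reached.".to_string(),
        AssistError::Message(message) => format!("Error interacting with the language model: {message}"),
    }
}
```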
- let messages = self - .context - .read(cx) - .messages_for_offsets([step_range_end, step_range_end + 1], cx); - if messages.len() == 1 { - step_range_end += 1; - } + let path_count = patch.path_count(); + let patch_start = multibuffer + .anchor_in_excerpt(excerpt_id, patch.range.start) + .unwrap(); + let patch_end = multibuffer + .anchor_in_excerpt(excerpt_id, patch.range.end) + .unwrap(); + let render_block: RenderBlock = Box::new({ + let this = this.clone(); + let patch_range = range.clone(); + move |cx: &mut BlockContext<'_, '_>| { + let max_width = cx.max_width; + let gutter_width = cx.gutter_dimensions.full_width(); + let block_id = cx.block_id; + this.update(&mut **cx, |this, cx| { + this.render_patch_footer( + patch_range.clone(), + max_width, + gutter_width, + block_id, + cx, + ) + }) + .ok() + .flatten() + .unwrap_or_else(|| Empty.into_any()) } - - let end = buffer.anchor_before(step_range_end); - multibuffer.anchor_in_excerpt(excerpt_id, start).unwrap() - ..multibuffer.anchor_in_excerpt(excerpt_id, end).unwrap() }); - let block_ids = editor.insert_blocks( - [BlockProperties { - position: header_range.start, - height: 1, - style: BlockStyle::Flex, - render: Box::new({ - let this = this.clone(); - let range = step.range.clone(); - move |cx| { - let block_id = cx.block_id; - let max_width = cx.max_width; - let gutter_width = cx.gutter_dimensions.full_width(); - this.update(&mut **cx, |this, cx| { - this.render_workflow_step_header( - range.clone(), - max_width, - gutter_width, - block_id, - cx, - ) - }) - .ok() - .flatten() - .unwrap_or_else(|| Empty.into_any()) - } - }), - disposition: BlockDisposition::Above, - priority: 0, - }] - .into_iter() - .chain(footer_range.as_ref().map(|footer_range| { - return BlockProperties { - position: footer_range.end, - height: 1, - style: BlockStyle::Flex, - render: Box::new({ - let this = this.clone(); - let range = step.range.clone(); - move |cx| { - let max_width = cx.max_width; - let gutter_width = cx.gutter_dimensions.full_width(); - this.update(&mut **cx, |this, cx| { - this.render_workflow_step_footer( - range.clone(), - max_width, - gutter_width, - cx, - ) - }) - .ok() - .flatten() - .unwrap_or_else(|| Empty.into_any()) - } - }), - disposition: BlockDisposition::Below, - priority: 0, - }; - })), - None, - cx, - ); - let header_placeholder = FoldPlaceholder { - render: Arc::new(move |_, _crease_range, _cx| Empty.into_any()), - constrain_width: false, - merge_adjacent: false, - }; - let footer_placeholder = FoldPlaceholder { - render: render_fold_icon_button( - cx.view().downgrade(), - IconName::Code, - "Edits".into(), - ), + render: { + let this = this.clone(); + let patch_range = range.clone(); + Arc::new(move |fold_id, _range, cx| { + this.update(cx, |this, cx| { + this.render_patch_header(patch_range.clone(), fold_id, cx) + }) + .ok() + .flatten() + .unwrap_or_else(|| Empty.into_any()) + }) + }, constrain_width: false, merge_adjacent: false, }; - let new_crease_ids = editor.insert_creases( - [Crease::new( - header_range.clone(), - header_placeholder.clone(), - fold_toggle("step-header"), - |_, _, _| Empty.into_any_element(), - )] - .into_iter() - .chain(footer_range.clone().map(|footer_range| { - Crease::new( - footer_range, - footer_placeholder.clone(), - |row, is_folded, fold, cx| { - if is_folded { - Empty.into_any_element() - } else { - fold_toggle("step-footer")(row, is_folded, fold, cx) - } - }, - |_, _, _| Empty.into_any_element(), - ) - })), - cx, - ); - - let state = WorkflowStepViewState { - header_block_id: block_ids[0], - 
header_crease_id: new_crease_ids[0], - footer_block_id: block_ids.get(1).copied(), - footer_crease_id: new_crease_ids.get(1).copied(), - resolution, - assist: None, - }; - - let mut folds_to_insert = [(header_range.clone(), header_placeholder)] - .into_iter() - .chain( - footer_range - .clone() - .map(|range| (range, footer_placeholder)), - ) - .collect::>(); - - match self.workflow_steps.entry(range.clone()) { - hash_map::Entry::Vacant(entry) => { - entry.insert(state); - } - hash_map::Entry::Occupied(mut entry) => { - let entry = entry.get_mut(); - removed_block_ids.insert(entry.header_block_id); - removed_crease_ids.push(entry.header_crease_id); - removed_block_ids.extend(entry.footer_block_id); - removed_crease_ids.extend(entry.footer_crease_id); - folds_to_insert.retain(|(range, _)| snapshot.intersects_fold(range.start)); - *entry = state; + if let Some(state) = self.patches.get_mut(&range) { + replaced_blocks.insert(state.footer_block_id, render_block); + if let Some(editor_state) = &state.editor { + if editor_state.opened_patch != patch { + state.update_task = Some({ + let this = this.clone(); + cx.spawn(|_, cx| async move { + Self::update_patch_editor(this.clone(), patch, cx) + .await + .log_err(); + }) + }); + } } - } + } else { + let block_ids = editor.insert_blocks( + [BlockProperties { + position: patch_start, + height: path_count as u32 + 1, + style: BlockStyle::Flex, + render: render_block, + disposition: BlockDisposition::Below, + priority: 0, + }], + None, + cx, + ); - editor.unfold_ranges( - [header_range.clone()] - .into_iter() - .chain(footer_range.clone()), - true, - false, - cx, - ); + let new_crease_ids = editor.insert_creases( + [Crease::new( + patch_start..patch_end, + header_placeholder.clone(), + fold_toggle("patch-header"), + |_, _, _| Empty.into_any_element(), + )], + cx, + ); - if !folds_to_insert.is_empty() { - editor.fold_ranges(folds_to_insert, false, cx); + self.patches.insert( + range.clone(), + PatchViewState { + footer_block_id: block_ids[0], + crease_id: new_crease_ids[0], + editor: None, + update_task: None, + }, + ); } + + editor.unfold_ranges([patch_start..patch_end], true, false, cx); + editor.fold_ranges([(patch_start..patch_end, header_placeholder)], false, cx); } editor.remove_creases(removed_crease_ids, cx); editor.remove_blocks(removed_block_ids, None, cx); + editor.replace_blocks(replaced_blocks, None, cx); }); - for (editor, editor_was_open) in editors_to_close { - self.close_workflow_editor(cx, editor, editor_was_open); + for editor in editors_to_close { + self.close_patch_editor(editor, cx); } - self.update_active_workflow_step(cx); + self.update_active_patch(cx); } fn insert_slash_command_output_sections( @@ -2491,20 +2304,26 @@ impl ContextEditor { .unwrap(); let buffer_row = MultiBufferRow(start.to_point(&buffer).row); buffer_rows_to_fold.insert(buffer_row); - creases.push(Crease::new( - start..end, - FoldPlaceholder { - render: render_fold_icon_button( - cx.view().downgrade(), - section.icon, - section.label.clone(), - ), - constrain_width: false, - merge_adjacent: false, - }, - render_slash_command_output_toggle, - |_, _, _| Empty.into_any_element(), - )); + creases.push( + Crease::new( + start..end, + FoldPlaceholder { + render: render_fold_icon_button( + cx.view().downgrade(), + section.icon, + section.label.clone(), + ), + constrain_width: false, + merge_adjacent: false, + }, + render_slash_command_output_toggle, + |_, _, _| Empty.into_any_element(), + ) + .with_metadata(CreaseMetadata { + icon: section.icon, + label: section.label, 
+ }), + ); } editor.insert_creases(creases, cx); @@ -2535,87 +2354,75 @@ impl ContextEditor { } EditorEvent::SelectionsChanged { .. } => { self.scroll_position = self.cursor_scroll_position(cx); - self.update_active_workflow_step(cx); + self.update_active_patch(cx); } _ => {} } cx.emit(event.clone()); } - fn active_workflow_step(&self) -> Option<(Range, &WorkflowStepViewState)> { - let step = self.active_workflow_step.as_ref()?; - Some((step.range.clone(), self.workflow_steps.get(&step.range)?)) + fn active_patch(&self) -> Option<(Range, &PatchViewState)> { + let patch = self.active_patch.as_ref()?; + Some((patch.clone(), self.patches.get(&patch)?)) } - fn update_active_workflow_step(&mut self, cx: &mut ViewContext) { - let newest_cursor = self.editor.read(cx).selections.newest::(cx).head(); + fn update_active_patch(&mut self, cx: &mut ViewContext) { + let newest_cursor = self.editor.read(cx).selections.newest::(cx).head(); let context = self.context.read(cx); - let new_step = context - .workflow_step_containing(newest_cursor, cx) - .map(|step| ActiveWorkflowStep { - resolved: step.resolution.is_some(), - range: step.range.clone(), - }); - - if new_step.as_ref() != self.active_workflow_step.as_ref() { - let mut old_editor = None; - let mut old_editor_was_open = None; - if let Some(old_step) = self.active_workflow_step.take() { - (old_editor, old_editor_was_open) = - self.hide_workflow_step(old_step.range, cx).unzip(); - } + let new_patch = context.patch_containing(newest_cursor, cx).cloned(); - let mut new_editor = None; - if let Some(new_step) = new_step { - new_editor = self.show_workflow_step(new_step.range.clone(), cx); - self.active_workflow_step = Some(new_step); - } + if new_patch.as_ref().map(|p| &p.range) == self.active_patch.as_ref() { + return; + } - if new_editor != old_editor { - if let Some((old_editor, old_editor_was_open)) = old_editor.zip(old_editor_was_open) - { - self.close_workflow_editor(cx, old_editor, old_editor_was_open) + if let Some(old_patch_range) = self.active_patch.take() { + if let Some(patch_state) = self.patches.get_mut(&old_patch_range) { + if let Some(state) = patch_state.editor.take() { + if let Some(editor) = state.editor.upgrade() { + self.close_patch_editor(editor, cx); + } } } } - } - fn hide_workflow_step( - &mut self, - step_range: Range, - cx: &mut ViewContext, - ) -> Option<(View, bool)> { - if let Some(step) = self.workflow_steps.get_mut(&step_range) { - let assist = step.assist.as_ref()?; - let editor = assist.editor.upgrade()?; - - if matches!(step.status(cx), WorkflowStepStatus::Idle) { - let assist = step.assist.take().unwrap(); - InlineAssistant::update_global(cx, |assistant, cx| { - for assist_id in assist.assist_ids { - assistant.finish_assist(assist_id, true, cx) + if let Some(new_patch) = new_patch { + self.active_patch = Some(new_patch.range.clone()); + + if let Some(patch_state) = self.patches.get_mut(&new_patch.range) { + let mut editor = None; + if let Some(state) = &patch_state.editor { + if let Some(opened_editor) = state.editor.upgrade() { + editor = Some(opened_editor); } - }); - return Some((editor, assist.editor_was_open)); + } + + if let Some(editor) = editor { + self.workspace + .update(cx, |workspace, cx| { + workspace.activate_item(&editor, true, false, cx); + }) + .ok(); + } else { + patch_state.update_task = Some(cx.spawn(move |this, cx| async move { + Self::open_patch_editor(this, new_patch, cx).await.log_err(); + })); + } } } - - None } - fn close_workflow_editor( + fn close_patch_editor( &mut self, + editor: View, cx: 
&mut ViewContext, - editor: View, - editor_was_open: bool, ) { self.workspace .update(cx, |workspace, cx| { if let Some(pane) = workspace.pane_for(&editor) { pane.update(cx, |pane, cx| { let item_id = editor.entity_id(); - if !editor_was_open && !editor.read(cx).is_focused(cx) { + if !editor.read(cx).focus_handle(cx).is_focused(cx) { pane.close_item_by_id(item_id, SaveIntent::Skip, cx) .detach_and_log_err(cx); } @@ -2625,191 +2432,94 @@ impl ContextEditor { .ok(); } - fn show_workflow_step( - &mut self, - step_range: Range, - cx: &mut ViewContext, - ) -> Option> { - let step = self.workflow_steps.get_mut(&step_range)?; - - let mut editor_to_return = None; - let mut scroll_to_assist_id = None; - match step.status(cx) { - WorkflowStepStatus::Idle => { - if let Some(assist) = step.assist.as_ref() { - scroll_to_assist_id = assist.assist_ids.first().copied(); - } else if let Some(Ok(resolved)) = step.resolution.clone().as_deref() { - step.assist = Self::open_assists_for_step( - &resolved, - &self.project, - &self.assistant_panel, - &self.workspace, - cx, - ); - editor_to_return = step - .assist - .as_ref() - .and_then(|assist| assist.editor.upgrade()); - } - } - WorkflowStepStatus::Pending => { - if let Some(assist) = step.assist.as_ref() { - let assistant = InlineAssistant::global(cx); - scroll_to_assist_id = assist - .assist_ids - .iter() - .copied() - .find(|assist_id| assistant.assist_status(*assist_id, cx).is_pending()); - } - } - WorkflowStepStatus::Done => { - if let Some(assist) = step.assist.as_ref() { - scroll_to_assist_id = assist.assist_ids.first().copied(); - } - } - _ => {} - } + async fn open_patch_editor( + this: WeakView, + patch: AssistantPatch, + mut cx: AsyncWindowContext, + ) -> Result<()> { + let project = this.update(&mut cx, |this, _| this.project.clone())?; + let resolved_patch = patch.resolve(project.clone(), &mut cx).await; - if let Some(assist_id) = scroll_to_assist_id { - if let Some(assist_editor) = step - .assist - .as_ref() - .and_then(|assists| assists.editor.upgrade()) - { - editor_to_return = Some(assist_editor.clone()); - self.workspace - .update(cx, |workspace, cx| { - workspace.activate_item(&assist_editor, false, false, cx); + let editor = cx.new_view(|cx| { + let editor = ProposedChangesEditor::new( + patch.title.clone(), + resolved_patch + .edit_groups + .iter() + .map(|(buffer, groups)| ProposedChangeLocation { + buffer: buffer.clone(), + ranges: groups + .iter() + .map(|group| group.context_range.clone()) + .collect(), }) - .ok(); - InlineAssistant::update_global(cx, |assistant, cx| { - assistant.scroll_to_assist(assist_id, cx) + .collect(), + Some(project.clone()), + cx, + ); + resolved_patch.apply(&editor, cx); + editor + })?; + + this.update(&mut cx, |this, cx| { + if let Some(patch_state) = this.patches.get_mut(&patch.range) { + patch_state.editor = Some(PatchEditorState { + editor: editor.downgrade(), + opened_patch: patch, }); + patch_state.update_task.take(); } - } - editor_to_return + this.workspace + .update(cx, |workspace, cx| { + workspace.add_item_to_active_pane(Box::new(editor.clone()), None, false, cx) + }) + .log_err(); + })?; + + Ok(()) } - fn open_assists_for_step( - resolved_step: &WorkflowStepResolution, - project: &Model, - assistant_panel: &WeakView, - workspace: &WeakView, - cx: &mut ViewContext, - ) -> Option { - let assistant_panel = assistant_panel.upgrade()?; - if resolved_step.suggestion_groups.is_empty() { - return None; - } + async fn update_patch_editor( + this: WeakView, + patch: AssistantPatch, + mut cx: 
AsyncWindowContext, + ) -> Result<()> { + let project = this.update(&mut cx, |this, _| this.project.clone())?; + let resolved_patch = patch.resolve(project.clone(), &mut cx).await; + this.update(&mut cx, |this, cx| { + let patch_state = this.patches.get_mut(&patch.range)?; - let editor; - let mut editor_was_open = false; - let mut suggestion_groups = Vec::new(); - if resolved_step.suggestion_groups.len() == 1 - && resolved_step - .suggestion_groups - .values() - .next() - .unwrap() - .len() - == 1 - { - // If there's only one buffer and one suggestion group, open it directly - let (buffer, groups) = resolved_step.suggestion_groups.iter().next().unwrap(); - let group = groups.into_iter().next().unwrap(); - editor = workspace - .update(cx, |workspace, cx| { - let active_pane = workspace.active_pane().clone(); - editor_was_open = - workspace.is_project_item_open::(&active_pane, buffer, cx); - workspace.open_project_item::( - active_pane, - buffer.clone(), - false, - false, - cx, - ) + let locations = resolved_patch + .edit_groups + .iter() + .map(|(buffer, groups)| ProposedChangeLocation { + buffer: buffer.clone(), + ranges: groups + .iter() + .map(|group| group.context_range.clone()) + .collect(), }) - .log_err()?; - let (&excerpt_id, _, _) = editor - .read(cx) - .buffer() - .read(cx) - .read(cx) - .as_singleton() - .unwrap(); - - // Scroll the editor to the suggested assist - editor.update(cx, |editor, cx| { - let multibuffer = editor.buffer().read(cx).snapshot(cx); - let (&excerpt_id, _, buffer) = multibuffer.as_singleton().unwrap(); - let anchor = if group.context_range.start.to_offset(buffer) == 0 { - Anchor::min() - } else { - multibuffer - .anchor_in_excerpt(excerpt_id, group.context_range.start) - .unwrap() - }; + .collect(); - editor.set_scroll_anchor( - ScrollAnchor { - offset: gpui::Point::default(), - anchor, - }, - cx, - ); - }); + if let Some(state) = &mut patch_state.editor { + if let Some(editor) = state.editor.upgrade() { + editor.update(cx, |editor, cx| { + editor.set_title(patch.title.clone(), cx); + editor.reset_locations(locations, cx); + resolved_patch.apply(editor, cx); + }); - suggestion_groups.push((excerpt_id, group)); - } else { - // If there are multiple buffers or suggestion groups, create a multibuffer - let multibuffer = cx.new_model(|cx| { - let replica_id = project.read(cx).replica_id(); - let mut multibuffer = MultiBuffer::new(replica_id, Capability::ReadWrite) - .with_title(resolved_step.title.clone()); - for (buffer, groups) in &resolved_step.suggestion_groups { - let excerpt_ids = multibuffer.push_excerpts( - buffer.clone(), - groups.iter().map(|suggestion_group| ExcerptRange { - context: suggestion_group.context_range.clone(), - primary: None, - }), - cx, - ); - suggestion_groups.extend(excerpt_ids.into_iter().zip(groups)); + state.opened_patch = patch; + } else { + patch_state.editor.take(); } - multibuffer - }); - - editor = cx.new_view(|cx| { - Editor::for_multibuffer(multibuffer, Some(project.clone()), true, cx) - }); - workspace - .update(cx, |workspace, cx| { - workspace.add_item_to_active_pane(Box::new(editor.clone()), None, false, cx) - }) - .log_err()?; - } - - let mut assist_ids = Vec::new(); - for (excerpt_id, suggestion_group) in suggestion_groups { - for suggestion in &suggestion_group.suggestions { - assist_ids.extend(suggestion.show( - &editor, - excerpt_id, - workspace, - &assistant_panel, - cx, - )); } - } + patch_state.update_task.take(); - Some(WorkflowAssist { - assist_ids, - editor: editor.downgrade(), - editor_was_open, - }) + 
Some(()) + })?; + Ok(()) } fn handle_editor_search_event( @@ -3055,6 +2765,49 @@ impl ContextEditor { }); } + /// Returns either the selected text, or the content of the Markdown code + /// block surrounding the cursor. + fn get_selection_or_code_block( + context_editor_view: &View, + cx: &mut ViewContext, + ) -> Option<(String, bool)> { + const CODE_FENCE_DELIMITER: &'static str = "```"; + + let context_editor = context_editor_view.read(cx).editor.read(cx); + + if context_editor.selections.newest::(cx).is_empty() { + let snapshot = context_editor.buffer().read(cx).snapshot(cx); + let (_, _, snapshot) = snapshot.as_singleton()?; + + let head = context_editor.selections.newest::(cx).head(); + let offset = snapshot.point_to_offset(head); + + let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?; + let mut text = snapshot + .text_for_range(surrounding_code_block_range) + .collect::(); + + // If there is no newline trailing the closing three-backticks, then + // tree-sitter-md extends the range of the content node to include + // the backticks. + if text.ends_with(CODE_FENCE_DELIMITER) { + text.drain((text.len() - CODE_FENCE_DELIMITER.len())..); + } + + (!text.is_empty()).then_some((text, true)) + } else { + let anchor = context_editor.selections.newest_anchor(); + let text = context_editor + .buffer() + .read(cx) + .read(cx) + .text_for_range(anchor.range()) + .collect::(); + + (!text.is_empty()).then_some((text, false)) + } + } + fn insert_selection( workspace: &mut Workspace, _: &InsertIntoEditor, @@ -3073,17 +2826,7 @@ impl ContextEditor { return; }; - let context_editor = context_editor_view.read(cx).editor.read(cx); - let anchor = context_editor.selections.newest_anchor(); - let text = context_editor - .buffer() - .read(cx) - .read(cx) - .text_for_range(anchor.range()) - .collect::(); - - // If nothing is selected, don't delete the current selection; instead, be a no-op. 
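        // Note: insert_selection now funnels through get_selection_or_code_block,
        // so an empty selection falls back to the Markdown code block surrounding
        // the cursor instead of silently doing nothing.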
- if !text.is_empty() { + if let Some((text, _)) = Self::get_selection_or_code_block(&context_editor_view, cx) { active_editor_view.update(cx, |editor, cx| { editor.insert(&text, cx); editor.focus(cx); @@ -3091,6 +2834,36 @@ impl ContextEditor { } } + fn copy_code(workspace: &mut Workspace, _: &CopyCode, cx: &mut ViewContext) { + let result = maybe!({ + let panel = workspace.panel::(cx)?; + let context_editor_view = panel.read(cx).active_context_editor(cx)?; + Self::get_selection_or_code_block(&context_editor_view, cx) + }); + let Some((text, is_code_block)) = result else { + return; + }; + + cx.write_to_clipboard(ClipboardItem::new_string(text)); + + struct CopyToClipboardToast; + workspace.show_toast( + Toast::new( + NotificationId::unique::(), + format!( + "{} copied to clipboard.", + if is_code_block { + "Code block" + } else { + "Selection" + } + ), + ) + .autohide(), + cx, + ); + } + fn insert_dragged_files( workspace: &mut Workspace, action: &InsertDraggedFiles, @@ -3180,87 +2953,93 @@ impl ContextEditor { return; }; - let selection = editor.update(cx, |editor, cx| editor.selections.newest_adjusted(cx)); - let editor = editor.read(cx); - let buffer = editor.buffer().read(cx).snapshot(cx); - let range = editor::ToOffset::to_offset(&selection.start, &buffer) - ..editor::ToOffset::to_offset(&selection.end, &buffer); - let selected_text = buffer.text_for_range(range.clone()).collect::(); - if selected_text.is_empty() { - return; - } - - let start_language = buffer.language_at(range.start); - let end_language = buffer.language_at(range.end); - let language_name = if start_language == end_language { - start_language.map(|language| language.code_fence_block_name()) - } else { - None - }; - let language_name = language_name.as_deref().unwrap_or(""); - - let filename = buffer - .file_at(selection.start) - .map(|file| file.full_path(cx)); - - let text = if language_name == "markdown" { - selected_text - .lines() - .map(|line| format!("> {}", line)) - .collect::>() - .join("\n") - } else { - let start_symbols = buffer - .symbols_containing(selection.start, None) - .map(|(_, symbols)| symbols); - let end_symbols = buffer - .symbols_containing(selection.end, None) - .map(|(_, symbols)| symbols); - - let outline_text = - if let Some((start_symbols, end_symbols)) = start_symbols.zip(end_symbols) { - Some( - start_symbols - .into_iter() - .zip(end_symbols) - .take_while(|(a, b)| a == b) - .map(|(a, _)| a.text) - .collect::>() - .join(" > "), - ) + let mut creases = vec![]; + editor.update(cx, |editor, cx| { + let selections = editor.selections.all_adjusted(cx); + let buffer = editor.buffer().read(cx).snapshot(cx); + for selection in selections { + let range = editor::ToOffset::to_offset(&selection.start, &buffer) + ..editor::ToOffset::to_offset(&selection.end, &buffer); + let selected_text = buffer.text_for_range(range.clone()).collect::(); + if selected_text.is_empty() { + continue; + } + let start_language = buffer.language_at(range.start); + let end_language = buffer.language_at(range.end); + let language_name = if start_language == end_language { + start_language.map(|language| language.code_fence_block_name()) } else { None }; + let language_name = language_name.as_deref().unwrap_or(""); + let filename = buffer + .file_at(selection.start) + .map(|file| file.full_path(cx)); + let text = if language_name == "markdown" { + selected_text + .lines() + .map(|line| format!("> {}", line)) + .collect::>() + .join("\n") + } else { + let start_symbols = buffer + .symbols_containing(selection.start, None) 
+ .map(|(_, symbols)| symbols); + let end_symbols = buffer + .symbols_containing(selection.end, None) + .map(|(_, symbols)| symbols); + + let outline_text = if let Some((start_symbols, end_symbols)) = + start_symbols.zip(end_symbols) + { + Some( + start_symbols + .into_iter() + .zip(end_symbols) + .take_while(|(a, b)| a == b) + .map(|(a, _)| a.text) + .collect::>() + .join(" > "), + ) + } else { + None + }; - let line_comment_prefix = start_language - .and_then(|l| l.default_scope().line_comment_prefixes().first().cloned()); - - let fence = codeblock_fence_for_path( - filename.as_deref(), - Some(selection.start.row..selection.end.row), - ); + let line_comment_prefix = start_language + .and_then(|l| l.default_scope().line_comment_prefixes().first().cloned()); - if let Some((line_comment_prefix, outline_text)) = line_comment_prefix.zip(outline_text) - { - let breadcrumb = format!("{line_comment_prefix}Excerpt from: {outline_text}\n"); - format!("{fence}{breadcrumb}{selected_text}\n```") - } else { - format!("{fence}{selected_text}\n```") - } - }; + let fence = codeblock_fence_for_path( + filename.as_deref(), + Some(selection.start.row..=selection.end.row), + ); - let crease_title = if let Some(path) = filename { - let start_line = selection.start.row + 1; - let end_line = selection.end.row + 1; - if start_line == end_line { - format!("{}, Line {}", path.display(), start_line) - } else { - format!("{}, Lines {} to {}", path.display(), start_line, end_line) + if let Some((line_comment_prefix, outline_text)) = + line_comment_prefix.zip(outline_text) + { + let breadcrumb = + format!("{line_comment_prefix}Excerpt from: {outline_text}\n"); + format!("{fence}{breadcrumb}{selected_text}\n```") + } else { + format!("{fence}{selected_text}\n```") + } + }; + let crease_title = if let Some(path) = filename { + let start_line = selection.start.row + 1; + let end_line = selection.end.row + 1; + if start_line == end_line { + format!("{}, Line {}", path.display(), start_line) + } else { + format!("{}, Lines {} to {}", path.display(), start_line, end_line) + } + } else { + "Quoted selection".to_string() + }; + creases.push((text, crease_title)); } - } else { - "Quoted selection".to_string() - }; - + }); + if creases.is_empty() { + return; + } // Activate the panel if !panel.focus_handle(cx).contains_focused(cx) { workspace.toggle_panel_focus::(cx); @@ -3277,39 +3056,40 @@ impl ContextEditor { context.update(cx, |context, cx| { context.editor.update(cx, |editor, cx| { editor.insert("\n", cx); + for (text, crease_title) in creases { + let point = editor.selections.newest::(cx).head(); + let start_row = MultiBufferRow(point.row); - let point = editor.selections.newest::(cx).head(); - let start_row = MultiBufferRow(point.row); + editor.insert(&text, cx); - editor.insert(&text, cx); + let snapshot = editor.buffer().read(cx).snapshot(cx); + let anchor_before = snapshot.anchor_after(point); + let anchor_after = editor + .selections + .newest_anchor() + .head() + .bias_left(&snapshot); - let snapshot = editor.buffer().read(cx).snapshot(cx); - let anchor_before = snapshot.anchor_after(point); - let anchor_after = editor - .selections - .newest_anchor() - .head() - .bias_left(&snapshot); + editor.insert("\n", cx); - editor.insert("\n", cx); - - let fold_placeholder = quote_selection_fold_placeholder( - crease_title, - cx.view().downgrade(), - ); - let crease = Crease::new( - anchor_before..anchor_after, - fold_placeholder, - render_quote_selection_output_toggle, - |_, _, _| Empty.into_any(), - ); - 
editor.insert_creases(vec![crease], cx); - editor.fold_at( - &FoldAt { - buffer_row: start_row, - }, - cx, - ); + let fold_placeholder = quote_selection_fold_placeholder( + crease_title, + cx.view().downgrade(), + ); + let crease = Crease::new( + anchor_before..anchor_after, + fold_placeholder, + render_quote_selection_output_toggle, + |_, _, _| Empty.into_any(), + ); + editor.insert_creases(vec![crease], cx); + editor.fold_at( + &FoldAt { + buffer_row: start_row, + }, + cx, + ); + } }) }); }; @@ -3318,39 +3098,132 @@ impl ContextEditor { } fn copy(&mut self, _: &editor::actions::Copy, cx: &mut ViewContext) { - let editor = self.editor.read(cx); + if self.editor.read(cx).selections.count() == 1 { + let (copied_text, metadata, _) = self.get_clipboard_contents(cx); + cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata( + copied_text, + metadata, + )); + cx.stop_propagation(); + return; + } + + cx.propagate(); + } + + fn cut(&mut self, _: &editor::actions::Cut, cx: &mut ViewContext) { + if self.editor.read(cx).selections.count() == 1 { + let (copied_text, metadata, selections) = self.get_clipboard_contents(cx); + + self.editor.update(cx, |editor, cx| { + editor.transact(cx, |this, cx| { + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select(selections); + }); + this.insert("", cx); + cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata( + copied_text, + metadata, + )); + }); + }); + + cx.stop_propagation(); + return; + } + + cx.propagate(); + } + + fn get_clipboard_contents( + &mut self, + cx: &mut ViewContext, + ) -> (String, CopyMetadata, Vec>) { + let (snapshot, selection, creases) = self.editor.update(cx, |editor, cx| { + let mut selection = editor.selections.newest::(cx); + let snapshot = editor.buffer().read(cx).snapshot(cx); + + let is_entire_line = selection.is_empty() || editor.selections.line_mode; + if is_entire_line { + selection.start = Point::new(selection.start.row, 0); + selection.end = + cmp::min(snapshot.max_point(), Point::new(selection.start.row + 1, 0)); + selection.goal = SelectionGoal::None; + } + + let selection_start = snapshot.point_to_offset(selection.start); + + ( + snapshot.clone(), + selection.clone(), + editor.display_map.update(cx, |display_map, cx| { + display_map + .snapshot(cx) + .crease_snapshot + .creases_in_range( + MultiBufferRow(selection.start.row) + ..MultiBufferRow(selection.end.row + 1), + &snapshot, + ) + .filter_map(|crease| { + if let Some(metadata) = &crease.metadata { + let start = crease + .range + .start + .to_offset(&snapshot) + .saturating_sub(selection_start); + let end = crease + .range + .end + .to_offset(&snapshot) + .saturating_sub(selection_start); + + let range_relative_to_selection = start..end; + + if range_relative_to_selection.is_empty() { + None + } else { + Some(SelectedCreaseMetadata { + range_relative_to_selection, + crease: metadata.clone(), + }) + } + } else { + None + } + }) + .collect::>() + }), + ) + }); + + let selection = selection.map(|point| snapshot.point_to_offset(point)); let context = self.context.read(cx); - if editor.selections.count() == 1 { - let selection = editor.selections.newest::(cx); - let mut copied_text = String::new(); - let mut spanned_messages = 0; - for message in context.messages(cx) { - if message.offset_range.start >= selection.range().end { - break; - } else if message.offset_range.end >= selection.range().start { - let range = cmp::max(message.offset_range.start, selection.range().start) - ..cmp::min(message.offset_range.end, 
selection.range().end); - if !range.is_empty() { - spanned_messages += 1; - write!(&mut copied_text, "## {}\n\n", message.role).unwrap(); - for chunk in context.buffer().read(cx).text_for_range(range) { - copied_text.push_str(chunk); - } - copied_text.push('\n'); + + let mut text = String::new(); + for message in context.messages(cx) { + if message.offset_range.start >= selection.range().end { + break; + } else if message.offset_range.end >= selection.range().start { + let range = cmp::max(message.offset_range.start, selection.range().start) + ..cmp::min(message.offset_range.end, selection.range().end); + if !range.is_empty() { + for chunk in context.buffer().read(cx).text_for_range(range) { + text.push_str(chunk); + } + if message.offset_range.end < selection.range().end { + text.push('\n'); } } } - - if spanned_messages > 1 { - cx.write_to_clipboard(ClipboardItem::new_string(copied_text)); - return; - } } - cx.propagate(); + (text, CopyMetadata { creases }, vec![selection]) } - fn paste(&mut self, _: &editor::actions::Paste, cx: &mut ViewContext) { + fn paste(&mut self, action: &editor::actions::Paste, cx: &mut ViewContext) { + cx.stop_propagation(); + let images = if let Some(item) = cx.read_from_clipboard() { item.into_entries() .filter_map(|entry| { @@ -3365,9 +3238,62 @@ impl ContextEditor { Vec::new() }; + let metadata = if let Some(item) = cx.read_from_clipboard() { + item.entries().first().and_then(|entry| { + if let ClipboardEntry::String(text) = entry { + text.metadata_json::() + } else { + None + } + }) + } else { + None + }; + if images.is_empty() { - // If we didn't find any valid image data to paste, propagate to let normal pasting happen. - cx.propagate(); + self.editor.update(cx, |editor, cx| { + let paste_position = editor.selections.newest::(cx).head(); + editor.paste(action, cx); + + if let Some(metadata) = metadata { + let buffer = editor.buffer().read(cx).snapshot(cx); + + let mut buffer_rows_to_fold = BTreeSet::new(); + let weak_editor = cx.view().downgrade(); + editor.insert_creases( + metadata.creases.into_iter().map(|metadata| { + let start = buffer.anchor_after( + paste_position + metadata.range_relative_to_selection.start, + ); + let end = buffer.anchor_before( + paste_position + metadata.range_relative_to_selection.end, + ); + + let buffer_row = MultiBufferRow(start.to_point(&buffer).row); + buffer_rows_to_fold.insert(buffer_row); + Crease::new( + start..end, + FoldPlaceholder { + constrain_width: false, + render: render_fold_icon_button( + weak_editor.clone(), + metadata.crease.icon, + metadata.crease.label.clone(), + ), + merge_adjacent: false, + }, + render_slash_command_output_toggle, + |_, _, _| Empty.into_any(), + ) + .with_metadata(metadata.crease.clone()) + }), + cx, + ); + for buffer_row in buffer_rows_to_fold.into_iter().rev() { + editor.fold_at(&FoldAt { buffer_row }, cx); + } + } + }); } else { let mut image_positions = Vec::new(); self.editor.update(cx, |editor, cx| { @@ -3388,10 +3314,22 @@ impl ContextEditor { self.context.update(cx, |context, cx| { for image in images { + let Some(render_image) = image.to_image_data(cx).log_err() else { + continue; + }; let image_id = image.id(); - context.insert_image(image, cx); + let image_task = LanguageModelImage::from_image(image, cx).shared(); + for image_position in image_positions.iter() { - context.insert_image_anchor(image_id, image_position.text_anchor, cx); + context.insert_content( + Content::Image { + anchor: image_position.text_anchor, + image_id, + image: image_task.clone(), + render_image: 
render_image.clone(), + }, + cx, + ); } } }); @@ -3406,11 +3344,23 @@ impl ContextEditor { let new_blocks = self .context .read(cx) - .images(cx) - .filter_map(|image| { + .contents(cx) + .filter_map(|content| { + if let Content::Image { + anchor, + render_image, + .. + } = content + { + Some((anchor, render_image)) + } else { + None + } + }) + .filter_map(|(anchor, render_image)| { const MAX_HEIGHT_IN_LINES: u32 = 8; - let anchor = buffer.anchor_in_excerpt(excerpt_id, image.anchor).unwrap(); - let image = image.render_image.clone(); + let anchor = buffer.anchor_in_excerpt(excerpt_id, anchor).unwrap(); + let image = render_image.clone(); anchor.is_valid(&buffer).then(|| BlockProperties { position: anchor, height: MAX_HEIGHT_IN_LINES, @@ -3474,394 +3424,91 @@ impl ContextEditor { .unwrap_or_else(|| Cow::Borrowed(DEFAULT_TAB_TITLE)) } - fn render_workflow_step_header( + fn render_patch_header( &self, range: Range, - max_width: Pixels, - gutter_width: Pixels, - id: BlockId, + _id: FoldId, cx: &mut ViewContext, ) -> Option { - let step_state = self.workflow_steps.get(&range)?; - let status = step_state.status(cx); - let this = cx.view().downgrade(); - - let theme = cx.theme().status(); - let is_confirmed = status.is_confirmed(); - let border_color = if is_confirmed { - theme.ignored_border - } else { - theme.info_border - }; - - let editor = self.editor.read(cx); - let focus_handle = editor.focus_handle(cx); - let snapshot = editor - .buffer() - .read(cx) - .as_singleton()? - .read(cx) - .text_snapshot(); - let start_offset = range.start.to_offset(&snapshot); - let parent_message = self - .context - .read(cx) - .messages_for_offsets([start_offset], cx); - debug_assert_eq!(parent_message.len(), 1); - let parent_message = parent_message.first()?; - - let step_index = self - .workflow_steps - .keys() - .filter(|workflow_step_range| { - workflow_step_range - .start - .cmp(&parent_message.anchor_range.start, &snapshot) - .is_ge() - && workflow_step_range.end.cmp(&range.end, &snapshot).is_le() - }) - .count(); - - let step_label = Label::new(format!("Step {step_index}")).size(LabelSize::Small); - - let step_label = if is_confirmed { - h_flex() - .items_center() - .gap_2() - .child(step_label.strikethrough(true).color(Color::Muted)) - .child( - Icon::new(IconName::Check) - .size(IconSize::Small) - .color(Color::Created), - ) - } else { - div().child(step_label) - }; - + let patch = self.context.read(cx).patch_for_range(&range, cx)?; + let theme = cx.theme().clone(); Some( - v_flex() - .w(max_width) - .pl(gutter_width) - .child( - h_flex() - .w_full() - .h_8() - .border_b_1() - .border_color(border_color) - .items_center() - .justify_between() - .gap_2() - .child(h_flex().justify_start().gap_2().child(step_label)) - .child(h_flex().w_full().justify_end().child( - Self::render_workflow_step_status( - status, - range.clone(), - focus_handle.clone(), - this.clone(), - id, - ), - )), - ) - // todo!("do we wanna keep this?") - // .children(edit_paths.iter().map(|path| { - // h_flex() - // .gap_1() - // .child(Icon::new(IconName::File)) - // .child(Label::new(path.clone())) - // })) + h_flex() + .px_1() + .py_0p5() + .border_b_1() + .border_color(theme.status().info_border) + .gap_1() + .child(Icon::new(IconName::Diff).size(IconSize::Small)) + .child(Label::new(patch.title.clone()).size(LabelSize::Small)) .into_any(), ) } - fn render_workflow_step_footer( - &self, - step_range: Range, + fn render_patch_footer( + &mut self, + range: Range, max_width: Pixels, gutter_width: Pixels, + id: BlockId, cx: &mut 
ViewContext, ) -> Option { - let step = self.workflow_steps.get(&step_range)?; - let current_status = step.status(cx); - let theme = cx.theme().status(); - let border_color = if current_status.is_confirmed() { - theme.ignored_border - } else { - theme.info_border - }; + let snapshot = self.editor.update(cx, |editor, cx| editor.snapshot(cx)); + let (excerpt_id, _buffer_id, _) = snapshot.buffer_snapshot.as_singleton().unwrap(); + let excerpt_id = *excerpt_id; + let anchor = snapshot + .buffer_snapshot + .anchor_in_excerpt(excerpt_id, range.start) + .unwrap(); + + if !snapshot.intersects_fold(anchor) { + return None; + } + + let patch = self.context.read(cx).patch_for_range(&range, cx)?; + let paths = patch + .paths() + .map(|p| SharedString::from(p.to_string())) + .collect::>(); + Some( v_flex() - .w(max_width) - .pt_1() + .id(id) .pl(gutter_width) - .child(h_flex().h(px(1.)).bg(border_color)) - .into_any(), - ) - } - - fn render_workflow_step_status( - status: WorkflowStepStatus, - step_range: Range, - focus_handle: FocusHandle, - editor: WeakView, - id: BlockId, - ) -> AnyElement { - let id = EntityId::from(id).as_u64(); - fn display_keybind_in_tooltip( - step_range: &Range, - editor: &WeakView, - cx: &mut WindowContext<'_>, - ) -> bool { - editor - .update(cx, |this, _| { - this.active_workflow_step - .as_ref() - .map(|step| &step.range == step_range) - }) - .ok() - .flatten() - .unwrap_or_default() - } - - match status { - WorkflowStepStatus::Error(error) => { - let error = error.to_string(); - h_flex() - .gap_2() - .child( - div() - .id("step-resolution-failure") - .child( - Label::new("Step Resolution Failed") - .size(LabelSize::Small) - .color(Color::Error), - ) - .tooltip(move |cx| Tooltip::text(error.clone(), cx)), - ) - .child( - Button::new(("transform", id), "Retry") - .icon(IconName::Update) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) - .label_size(LabelSize::Small) - .on_click({ - let editor = editor.clone(); - let step_range = step_range.clone(); - move |_, cx| { - editor - .update(cx, |this, cx| { - this.resolve_workflow_step(step_range.clone(), cx) - }) - .ok(); - } - }), - ) - .into_any() - } - WorkflowStepStatus::Idle | WorkflowStepStatus::Resolving { .. 
} => { - Button::new(("transform", id), "Transform") - .icon(IconName::SparkleAlt) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) - .label_size(LabelSize::Small) - .style(ButtonStyle::Tinted(TintColor::Accent)) - .tooltip({ - let step_range = step_range.clone(); - let editor = editor.clone(); - move |cx| { - cx.new_view(|cx| { - let tooltip = Tooltip::new("Transform"); - if display_keybind_in_tooltip(&step_range, &editor, cx) { - tooltip.key_binding(KeyBinding::for_action_in( - &Assist, - &focus_handle, - cx, - )) - } else { - tooltip - } - }) - .into() - } - }) - .on_click({ - let editor = editor.clone(); - let step_range = step_range.clone(); - let is_idle = matches!(status, WorkflowStepStatus::Idle); - move |_, cx| { - if is_idle { - editor - .update(cx, |this, cx| { - this.apply_workflow_step(step_range.clone(), cx) - }) - .ok(); - } - } - }) - .map(|this| { - if let WorkflowStepStatus::Resolving = &status { - this.with_animation( - ("resolving-suggestion-animation", id), + .w(max_width) + .py_2() + .cursor(CursorStyle::PointingHand) + .on_click(cx.listener(move |this, _, cx| { + this.editor.update(cx, |editor, cx| { + editor.change_selections(None, cx, |selections| { + selections.select_ranges(vec![anchor..anchor]); + }); + }); + this.focus_active_patch(cx); + })) + .children(paths.into_iter().map(|path| { + h_flex() + .pl_1() + .gap_1() + .child(Icon::new(IconName::File).size(IconSize::Small)) + .child(Label::new(path).size(LabelSize::Small)) + })) + .when(patch.status == AssistantPatchStatus::Pending, |div| { + div.child( + Label::new("Generating") + .color(Color::Muted) + .size(LabelSize::Small) + .with_animation( + "pulsating-label", Animation::new(Duration::from_secs(2)) .repeat() - .with_easing(pulsating_between(0.4, 0.8)), + .with_easing(pulsating_between(0.4, 1.)), |label, delta| label.alpha(delta), - ) - .into_any_element() - } else { - this.into_any_element() - } - }) - } - WorkflowStepStatus::Pending => h_flex() - .items_center() - .gap_2() - .child( - Label::new("Applying...") - .size(LabelSize::Small) - .with_animation( - ("applying-step-transformation-label", id), - Animation::new(Duration::from_secs(2)) - .repeat() - .with_easing(pulsating_between(0.4, 0.8)), - |label, delta| label.alpha(delta), - ), - ) - .child( - IconButton::new(("stop-transformation", id), IconName::Stop) - .icon_size(IconSize::Small) - .icon_color(Color::Error) - .style(ButtonStyle::Subtle) - .tooltip({ - let step_range = step_range.clone(); - let editor = editor.clone(); - move |cx| { - cx.new_view(|cx| { - let tooltip = Tooltip::new("Stop Transformation"); - if display_keybind_in_tooltip(&step_range, &editor, cx) { - tooltip.key_binding(KeyBinding::for_action_in( - &editor::actions::Cancel, - &focus_handle, - cx, - )) - } else { - tooltip - } - }) - .into() - } - }) - .on_click({ - let editor = editor.clone(); - let step_range = step_range.clone(); - move |_, cx| { - editor - .update(cx, |this, cx| { - this.stop_workflow_step(step_range.clone(), cx) - }) - .ok(); - } - }), - ) - .into_any_element(), - WorkflowStepStatus::Done => h_flex() - .gap_1() - .child( - IconButton::new(("stop-transformation", id), IconName::Close) - .icon_size(IconSize::Small) - .style(ButtonStyle::Tinted(TintColor::Negative)) - .tooltip({ - let focus_handle = focus_handle.clone(); - let editor = editor.clone(); - let step_range = step_range.clone(); - move |cx| { - cx.new_view(|cx| { - let tooltip = Tooltip::new("Reject Transformation"); - if display_keybind_in_tooltip(&step_range, &editor, cx) { - 
tooltip.key_binding(KeyBinding::for_action_in( - &editor::actions::Cancel, - &focus_handle, - cx, - )) - } else { - tooltip - } - }) - .into() - } - }) - .on_click({ - let editor = editor.clone(); - let step_range = step_range.clone(); - move |_, cx| { - editor - .update(cx, |this, cx| { - this.reject_workflow_step(step_range.clone(), cx); - }) - .ok(); - } - }), - ) - .child( - Button::new(("confirm-workflow-step", id), "Accept") - .icon(IconName::Check) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) - .label_size(LabelSize::Small) - .style(ButtonStyle::Tinted(TintColor::Positive)) - .tooltip({ - let editor = editor.clone(); - let step_range = step_range.clone(); - move |cx| { - cx.new_view(|cx| { - let tooltip = Tooltip::new("Accept Transformation"); - if display_keybind_in_tooltip(&step_range, &editor, cx) { - tooltip.key_binding(KeyBinding::for_action_in( - &Assist, - &focus_handle, - cx, - )) - } else { - tooltip - } - }) - .into() - } - }) - .on_click({ - let editor = editor.clone(); - let step_range = step_range.clone(); - move |_, cx| { - editor - .update(cx, |this, cx| { - this.confirm_workflow_step(step_range.clone(), cx); - }) - .ok(); - } - }), - ) - .into_any_element(), - WorkflowStepStatus::Confirmed => h_flex() - .child( - Button::new(("revert-workflow-step", id), "Undo") - .style(ButtonStyle::Filled) - .icon(Some(IconName::Undo)) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) - .label_size(LabelSize::Small) - .on_click({ - let editor = editor.clone(); - let step_range = step_range.clone(); - move |_, cx| { - editor - .update(cx, |this, cx| { - this.undo_workflow_step(step_range.clone(), cx); - }) - .ok(); - } - }), - ) - .into_any_element(), - } + ), + ) + }) + .into_any(), + ) } fn render_notice(&self, cx: &mut ViewContext) -> Option { @@ -3922,16 +3569,18 @@ impl ContextEditor { h_flex() .gap_3() .child( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .color(Color::Warning), ) .child(Label::new(label)), ) .child( - Button::new("open-configuration", "Open configuration") + Button::new("open-configuration", "Configure Providers") .size(ButtonSize::Compact) + .icon(Some(IconName::SlidersVertical)) .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) .style(ButtonStyle::Filled) .on_click({ let focus_handle = self.focus_handle(cx).clone(); @@ -3949,17 +3598,6 @@ impl ContextEditor { fn render_send_button(&self, cx: &mut ViewContext) -> impl IntoElement { let focus_handle = self.focus_handle(cx).clone(); - let button_text = match self.active_workflow_step() { - Some((_, step)) => match step.status(cx) { - WorkflowStepStatus::Error(_) => "Retry Step Resolution", - WorkflowStepStatus::Resolving => "Transform", - WorkflowStepStatus::Idle => "Transform", - WorkflowStepStatus::Pending => "Applying...", - WorkflowStepStatus::Done => "Accept", - WorkflowStepStatus::Confirmed => "Send", - }, - None => "Send", - }; let (style, tooltip) = match token_state(&self.context, cx) { Some(TokenState::NoTokensLeft { .. 
}) => ( @@ -3999,7 +3637,7 @@ impl ContextEditor { button.tooltip(move |_| tooltip.clone()) }) .layer(ElevationIndex::ModalSurface) - .child(Label::new(button_text)) + .child(Label::new("Send")) .children( KeyBinding::for_action_in(&Assist, &focus_handle, cx) .map(|binding| binding.into_any_element()), @@ -4008,6 +3646,194 @@ impl ContextEditor { focus_handle.dispatch_action(&Assist, cx); }) } + + fn render_last_error(&self, cx: &mut ViewContext) -> Option { + let last_error = self.last_error.as_ref()?; + + Some( + div() + .absolute() + .right_3() + .bottom_12() + .max_w_96() + .py_2() + .px_3() + .elevation_2(cx) + .occlude() + .child(match last_error { + AssistError::PaymentRequired => self.render_payment_required_error(cx), + AssistError::MaxMonthlySpendReached => { + self.render_max_monthly_spend_reached_error(cx) + } + AssistError::Message(error_message) => { + self.render_assist_error(error_message, cx) + } + }) + .into_any(), + ) + } + + fn render_payment_required_error(&self, cx: &mut ViewContext) -> AnyElement { + const ERROR_MESSAGE: &str = "Free tier exceeded. Subscribe and add payment to continue using Zed LLMs. You'll be billed at cost for tokens used."; + + v_flex() + .gap_0p5() + .child( + h_flex() + .gap_1p5() + .items_center() + .child(Icon::new(IconName::XCircle).color(Color::Error)) + .child(Label::new("Free Usage Exceeded").weight(FontWeight::MEDIUM)), + ) + .child( + div() + .id("error-message") + .max_h_24() + .overflow_y_scroll() + .child(Label::new(ERROR_MESSAGE)), + ) + .child( + h_flex() + .justify_end() + .mt_1() + .child(Button::new("subscribe", "Subscribe").on_click(cx.listener( + |this, _, cx| { + this.last_error = None; + cx.open_url(&zed_urls::account_url(cx)); + cx.notify(); + }, + ))) + .child(Button::new("dismiss", "Dismiss").on_click(cx.listener( + |this, _, cx| { + this.last_error = None; + cx.notify(); + }, + ))), + ) + .into_any() + } + + fn render_max_monthly_spend_reached_error(&self, cx: &mut ViewContext) -> AnyElement { + const ERROR_MESSAGE: &str = "You have reached your maximum monthly spend. 
Increase your spend limit to continue using Zed LLMs."; + + v_flex() + .gap_0p5() + .child( + h_flex() + .gap_1p5() + .items_center() + .child(Icon::new(IconName::XCircle).color(Color::Error)) + .child(Label::new("Max Monthly Spend Reached").weight(FontWeight::MEDIUM)), + ) + .child( + div() + .id("error-message") + .max_h_24() + .overflow_y_scroll() + .child(Label::new(ERROR_MESSAGE)), + ) + .child( + h_flex() + .justify_end() + .mt_1() + .child( + Button::new("subscribe", "Update Monthly Spend Limit").on_click( + cx.listener(|this, _, cx| { + this.last_error = None; + cx.open_url(&zed_urls::account_url(cx)); + cx.notify(); + }), + ), + ) + .child(Button::new("dismiss", "Dismiss").on_click(cx.listener( + |this, _, cx| { + this.last_error = None; + cx.notify(); + }, + ))), + ) + .into_any() + } + + fn render_assist_error( + &self, + error_message: &SharedString, + cx: &mut ViewContext, + ) -> AnyElement { + v_flex() + .gap_0p5() + .child( + h_flex() + .gap_1p5() + .items_center() + .child(Icon::new(IconName::XCircle).color(Color::Error)) + .child( + Label::new("Error interacting with language model") + .weight(FontWeight::MEDIUM), + ), + ) + .child( + div() + .id("error-message") + .max_h_24() + .overflow_y_scroll() + .child(Label::new(error_message.clone())), + ) + .child( + h_flex() + .justify_end() + .mt_1() + .child(Button::new("dismiss", "Dismiss").on_click(cx.listener( + |this, _, cx| { + this.last_error = None; + cx.notify(); + }, + ))), + ) + .into_any() + } +} + +/// Returns the contents of the *outermost* fenced code block that contains the given offset. +fn find_surrounding_code_block(snapshot: &BufferSnapshot, offset: usize) -> Option> { + const CODE_BLOCK_NODE: &'static str = "fenced_code_block"; + const CODE_BLOCK_CONTENT: &'static str = "code_fence_content"; + + let layer = snapshot.syntax_layers().next()?; + + let root_node = layer.node(); + let mut cursor = root_node.walk(); + + // Go to the first child for the given offset + while cursor.goto_first_child_for_byte(offset).is_some() { + // If we're at the end of the node, go to the next one. + // Example: if you have a fenced-code-block, and you're on the start of the line + // right after the closing ```, you want to skip the fenced-code-block and + // go to the next sibling. + if cursor.node().end_byte() == offset { + cursor.goto_next_sibling(); + } + + if cursor.node().start_byte() > offset { + break; + } + + // We found the fenced code block. + if cursor.node().kind() == CODE_BLOCK_NODE { + // Now we need to find the child node that contains the code. 
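            // (In tree-sitter-md the children of a fenced_code_block also include
            // the fence delimiters and an optional info string, so we scan the
            // siblings until we reach the code_fence_content node.)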
+ cursor.goto_first_child(); + loop { + if cursor.node().kind() == CODE_BLOCK_CONTENT { + return Some(cursor.node().byte_range()); + } + if !cursor.goto_next_sibling() { + break; + } + } + } + } + + None } fn render_fold_icon_button( @@ -4037,6 +3863,17 @@ fn render_fold_icon_button( }) } +#[derive(Debug, Clone, Serialize, Deserialize)] +struct CopyMetadata { + creases: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +struct SelectedCreaseMetadata { + range_relative_to_selection: Range, + crease: CreaseMetadata, +} + impl EventEmitter for ContextEditor {} impl EventEmitter for ContextEditor {} @@ -4062,6 +3899,7 @@ impl Render for ContextEditor { .capture_action(cx.listener(ContextEditor::cancel)) .capture_action(cx.listener(ContextEditor::save)) .capture_action(cx.listener(ContextEditor::copy)) + .capture_action(cx.listener(ContextEditor::cut)) .capture_action(cx.listener(ContextEditor::paste)) .capture_action(cx.listener(ContextEditor::cycle_message_role)) .capture_action(cx.listener(ContextEditor::confirm_command)) @@ -4090,48 +3928,7 @@ impl Render for ContextEditor { .child(element), ) }) - .when_some(self.error_message.clone(), |this, error_message| { - this.child( - div() - .absolute() - .right_3() - .bottom_12() - .max_w_96() - .py_2() - .px_3() - .elevation_2(cx) - .occlude() - .child( - v_flex() - .gap_0p5() - .child( - h_flex() - .gap_1p5() - .items_center() - .child(Icon::new(IconName::XCircle).color(Color::Error)) - .child( - Label::new("Error interacting with language model") - .weight(FontWeight::MEDIUM), - ), - ) - .child( - div() - .id("error-message") - .max_h_24() - .overflow_y_scroll() - .child(Label::new(error_message)), - ) - .child(h_flex().justify_end().mt_1().child( - Button::new("dismiss", "Dismiss").on_click(cx.listener( - |this, _, cx| { - this.error_message = None; - cx.notify(); - }, - )), - )), - ), - ) - }) + .children(self.render_last_error(cx)) .child( h_flex().w_full().relative().child( h_flex() @@ -4523,6 +4320,20 @@ impl Render for ContextEditorToolbarItem { let weak_self = cx.view().downgrade(); let right_side = h_flex() .gap_2() + // TODO display this in a nicer way, once we have a design for it. 
+ // .children({ + // let project = self + // .workspace + // .upgrade() + // .map(|workspace| workspace.read(cx).project().downgrade()); + // + // let scan_items_remaining = cx.update_global(|db: &mut SemanticDb, cx| { + // project.and_then(|project| db.remaining_summaries(&project, cx)) + // }); + + // scan_items_remaining + // .map(|remaining_items| format!("Files to scan: {}", remaining_items)) + // }) .child( ModelSelector::new( self.fs.clone(), @@ -4741,33 +4552,6 @@ pub enum WorkflowAssistStatus { Idle, } -impl WorkflowAssist { - pub fn status(&self, cx: &AppContext) -> WorkflowAssistStatus { - let assistant = InlineAssistant::global(cx); - if self - .assist_ids - .iter() - .any(|assist_id| assistant.assist_status(*assist_id, cx).is_pending()) - { - WorkflowAssistStatus::Pending - } else if self - .assist_ids - .iter() - .all(|assist_id| assistant.assist_status(*assist_id, cx).is_confirmed()) - { - WorkflowAssistStatus::Confirmed - } else if self - .assist_ids - .iter() - .all(|assist_id| assistant.assist_status(*assist_id, cx).is_done()) - { - WorkflowAssistStatus::Done - } else { - WorkflowAssistStatus::Idle - } - } -} - impl Render for ContextHistory { fn render(&mut self, _: &mut ViewContext) -> impl IntoElement { div().size_full().child(self.picker.clone()) @@ -5021,7 +4805,7 @@ fn quote_selection_fold_placeholder(title: String, editor: WeakView) -> ButtonLike::new(fold_id) .style(ButtonStyle::Filled) .layer(ElevationIndex::ElevatedSurface) - .child(Icon::new(IconName::TextSelect)) + .child(Icon::new(IconName::TextSnippet)) .child(Label::new(title.clone()).single_line()) .on_click(move |_, cx| { editor @@ -5125,7 +4909,7 @@ fn render_docs_slash_command_trailer( div() .id(("latest-error", row.0)) .child( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .color(Color::Warning), ) @@ -5147,16 +4931,21 @@ fn render_docs_slash_command_trailer( fn make_lsp_adapter_delegate( project: &Model, cx: &mut AppContext, -) -> Result> { +) -> Result>> { project.update(cx, |project, cx| { // TODO: Find the right worktree. 
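        // Behavior change: when the project has no worktrees this now returns
        // Ok(None) instead of an error, and the delegate is a LocalLspAdapterDelegate
        // built with the project's HTTP client and Fs handle.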
- let worktree = project - .worktrees(cx) - .next() - .ok_or_else(|| anyhow!("no worktrees when constructing ProjectLspAdapterDelegate"))?; + let Some(worktree) = project.worktrees(cx).next() else { + return Ok(None::>); + }; + let http_client = project.client().http_client().clone(); project.lsp_store().update(cx, |lsp_store, cx| { - Ok(ProjectLspAdapterDelegate::new(lsp_store, &worktree, cx) - as Arc) + Ok(Some(LocalLspAdapterDelegate::new( + lsp_store, + &worktree, + http_client, + project.fs().clone(), + cx, + ) as Arc)) }) }) } @@ -5260,3 +5049,85 @@ fn configuration_error(cx: &AppContext) -> Option { None } + +#[cfg(test)] +mod tests { + use super::*; + use gpui::{AppContext, Context}; + use language::Buffer; + use unindent::Unindent; + + #[gpui::test] + fn test_find_code_blocks(cx: &mut AppContext) { + let markdown = languages::language("markdown", tree_sitter_md::LANGUAGE.into()); + + let buffer = cx.new_model(|cx| { + let text = r#" + line 0 + line 1 + ```rust + fn main() {} + ``` + line 5 + line 6 + line 7 + ```go + func main() {} + ``` + line 11 + ``` + this is plain text code block + ``` + + ```go + func another() {} + ``` + line 19 + "# + .unindent(); + let mut buffer = Buffer::local(text, cx); + buffer.set_language(Some(markdown.clone()), cx); + buffer + }); + let snapshot = buffer.read(cx).snapshot(); + + let code_blocks = vec![ + Point::new(3, 0)..Point::new(4, 0), + Point::new(9, 0)..Point::new(10, 0), + Point::new(13, 0)..Point::new(14, 0), + Point::new(17, 0)..Point::new(18, 0), + ] + .into_iter() + .map(|range| snapshot.point_to_offset(range.start)..snapshot.point_to_offset(range.end)) + .collect::>(); + + let expected_results = vec![ + (0, None), + (1, None), + (2, Some(code_blocks[0].clone())), + (3, Some(code_blocks[0].clone())), + (4, Some(code_blocks[0].clone())), + (5, None), + (6, None), + (7, None), + (8, Some(code_blocks[1].clone())), + (9, Some(code_blocks[1].clone())), + (10, Some(code_blocks[1].clone())), + (11, None), + (12, Some(code_blocks[2].clone())), + (13, Some(code_blocks[2].clone())), + (14, Some(code_blocks[2].clone())), + (15, None), + (16, Some(code_blocks[3].clone())), + (17, Some(code_blocks[3].clone())), + (18, Some(code_blocks[3].clone())), + (19, None), + ]; + + for (row, expected) in expected_results { + let offset = snapshot.point_to_offset(Point::new(row, 0)); + let range = find_surrounding_code_block(&snapshot, offset); + assert_eq!(range, expected, "unexpected result on row {:?}", row); + } + } +} diff --git a/crates/assistant/src/assistant_settings.rs b/crates/assistant/src/assistant_settings.rs index d57c1f19b685c0..2bab6a9624f314 100644 --- a/crates/assistant/src/assistant_settings.rs +++ b/crates/assistant/src/assistant_settings.rs @@ -2,6 +2,7 @@ use std::sync::Arc; use ::open_ai::Model as OpenAiModel; use anthropic::Model as AnthropicModel; +use feature_flags::FeatureFlagAppExt; use fs::Fs; use gpui::{AppContext, Pixels}; use language_model::provider::open_ai; @@ -59,7 +60,15 @@ pub struct AssistantSettings { pub default_width: Pixels, pub default_height: Pixels, pub default_model: LanguageModelSelection, + pub inline_alternatives: Vec, pub using_outdated_settings_version: bool, + pub enable_experimental_live_diffs: bool, +} + +impl AssistantSettings { + pub fn are_live_diffs_enabled(&self, cx: &AppContext) -> bool { + cx.is_staff() || self.enable_experimental_live_diffs + } } /// Assistant panel settings @@ -160,12 +169,16 @@ impl AssistantSettingsContent { .filter_map(|model| match model { OpenAiModel::Custom { name, + 
display_name, max_tokens, max_output_tokens, + max_completion_tokens: None, } => Some(open_ai::AvailableModel { name, + display_name, max_tokens, max_output_tokens, + max_completion_tokens: None, }), _ => None, }) @@ -232,6 +245,8 @@ impl AssistantSettingsContent { }) } }), + inline_alternatives: None, + enable_experimental_live_diffs: None, }, VersionedAssistantSettingsContent::V2(settings) => settings.clone(), }, @@ -250,6 +265,8 @@ impl AssistantSettingsContent { .id() .to_string(), }), + inline_alternatives: None, + enable_experimental_live_diffs: None, }, } } @@ -365,6 +382,8 @@ impl Default for VersionedAssistantSettingsContent { default_width: None, default_height: None, default_model: None, + inline_alternatives: None, + enable_experimental_live_diffs: None, }) } } @@ -393,6 +412,12 @@ pub struct AssistantSettingsContentV2 { default_height: Option, /// The default model to use when creating new contexts. default_model: Option, + /// Additional models with which to generate alternatives when performing inline assists. + inline_alternatives: Option>, + /// Enable experimental live diffs in the assistant panel. + /// + /// Default: false + enable_experimental_live_diffs: Option, } #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)] @@ -513,9 +538,11 @@ impl Settings for AssistantSettings { &mut settings.default_height, value.default_height.map(Into::into), ); + merge(&mut settings.default_model, value.default_model); + merge(&mut settings.inline_alternatives, value.inline_alternatives); merge( - &mut settings.default_model, - value.default_model.map(Into::into), + &mut settings.enable_experimental_live_diffs, + value.enable_experimental_live_diffs, ); } @@ -569,11 +596,13 @@ mod tests { provider: "test-provider".into(), model: "gpt-99".into(), }), + inline_alternatives: None, enabled: None, button: None, dock: None, default_width: None, default_height: None, + enable_experimental_live_diffs: None, }), ) }, diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index 1bf846369b7627..2818411d0d34c5 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -2,8 +2,8 @@ mod context_tests; use crate::{ - prompts::PromptBuilder, slash_command::SlashCommandLine, MessageId, MessageStatus, - WorkflowStep, WorkflowStepEdit, WorkflowStepResolution, WorkflowSuggestionGroup, + prompts::PromptBuilder, slash_command::SlashCommandLine, AssistantEdit, AssistantPatch, + AssistantPatchStatus, MessageId, MessageStatus, }; use anyhow::{anyhow, Context as _, Result}; use assistant_slash_command::{ @@ -15,30 +15,27 @@ use clock::ReplicaId; use collections::{HashMap, HashSet}; use feature_flags::{FeatureFlag, FeatureFlagAppExt}; use fs::{Fs, RemoveOptions}; -use futures::{ - future::{self, Shared}, - stream::FuturesUnordered, - FutureExt, StreamExt, -}; +use futures::{future::Shared, FutureExt, StreamExt}; use gpui::{ - AppContext, AsyncAppContext, Context as _, EventEmitter, Image, Model, ModelContext, - RenderImage, SharedString, Subscription, Task, + AppContext, Context as _, EventEmitter, Model, ModelContext, RenderImage, SharedString, + Subscription, Task, }; use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset}; use language_model::{ + provider::cloud::{MaxMonthlySpendReachedError, PaymentRequiredError}, LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent, LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, - 
LanguageModelRequestTool, MessageContent, Role, StopReason, + LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolUse, MessageContent, Role, + StopReason, }; use open_ai::Model as OpenAiModel; -use paths::{context_images_dir, contexts_dir}; +use paths::contexts_dir; use project::Project; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use std::{ - cmp::{self, max, Ordering}, - collections::hash_map, + cmp::{max, Ordering}, fmt::Debug, iter, mem, ops::Range, @@ -47,7 +44,7 @@ use std::{ sync::Arc, time::{Duration, Instant}, }; -use telemetry_events::AssistantKind; +use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase}; use text::BufferSnapshot; use util::{post_inc, ResultExt, TryFutureExt}; use uuid::Uuid; @@ -163,6 +160,9 @@ impl ContextOperation { )?, icon: section.icon_name.parse()?, label: section.label.into(), + metadata: section + .metadata + .and_then(|metadata| serde_json::from_str(&metadata).log_err()), }) }) .collect::>>()?, @@ -243,6 +243,9 @@ impl ContextOperation { )), icon_name: icon_name.to_string(), label: section.label.to_string(), + metadata: section.metadata.as_ref().and_then(|metadata| { + serde_json::to_string(metadata).log_err() + }), } }) .collect(), @@ -289,10 +292,12 @@ impl ContextOperation { #[derive(Debug, Clone)] pub enum ContextEvent { ShowAssistError(SharedString), + ShowPaymentRequiredError, + ShowMaxMonthlySpendReachedError, MessagesEdited, SummaryChanged, StreamedCompletion, - WorkflowStepsUpdated { + PatchesUpdated { removed: Vec>, updated: Vec>, }, @@ -377,23 +382,8 @@ impl MessageMetadata { } } -#[derive(Clone, Debug)] -pub struct MessageImage { - image_id: u64, - image: Shared>>, -} - -impl PartialEq for MessageImage { - fn eq(&self, other: &Self) -> bool { - self.image_id == other.image_id - } -} - -impl Eq for MessageImage {} - #[derive(Clone, Debug)] pub struct Message { - pub image_offsets: SmallVec<[(usize, MessageImage); 1]>, pub offset_range: Range, pub index_range: Range, pub anchor_range: Range, @@ -403,62 +393,45 @@ pub struct Message { pub cache: Option, } -impl Message { - fn to_request_message(&self, buffer: &Buffer) -> Option { - let mut content = Vec::new(); - - let mut range_start = self.offset_range.start; - for (image_offset, message_image) in self.image_offsets.iter() { - if *image_offset != range_start { - if let Some(text) = Self::collect_text_content(buffer, range_start..*image_offset) { - content.push(text); - } - } - - if let Some(image) = message_image.image.clone().now_or_never().flatten() { - content.push(language_model::MessageContent::Image(image)); - } - - range_start = *image_offset; - } - - if range_start != self.offset_range.end { - if let Some(text) = - Self::collect_text_content(buffer, range_start..self.offset_range.end) - { - content.push(text); - } - } +#[derive(Debug, Clone)] +pub enum Content { + Image { + anchor: language::Anchor, + image_id: u64, + render_image: Arc, + image: Shared>>, + }, + ToolUse { + range: Range, + tool_use: LanguageModelToolUse, + }, + ToolResult { + range: Range, + tool_use_id: Arc, + }, +} - if content.is_empty() { - return None; +impl Content { + fn range(&self) -> Range { + match self { + Self::Image { anchor, .. } => *anchor..*anchor, + Self::ToolUse { range, .. } | Self::ToolResult { range, .. 
} => range.clone(), } - - Some(LanguageModelRequestMessage { - role: self.role, - content, - cache: self.cache.as_ref().map_or(false, |cache| cache.is_anchor), - }) } - fn collect_text_content(buffer: &Buffer, range: Range) -> Option { - let text: String = buffer.text_for_range(range.clone()).collect(); - if text.trim().is_empty() { - None + fn cmp(&self, other: &Self, buffer: &BufferSnapshot) -> Ordering { + let self_range = self.range(); + let other_range = other.range(); + if self_range.end.cmp(&other_range.start, buffer).is_lt() { + Ordering::Less + } else if self_range.start.cmp(&other_range.end, buffer).is_gt() { + Ordering::Greater } else { - Some(MessageContent::Text(text)) + Ordering::Equal } } } -#[derive(Clone, Debug)] -pub struct ImageAnchor { - pub anchor: language::Anchor, - pub image_id: u64, - pub render_image: Arc, - pub image: Shared>>, -} - struct PendingCompletion { id: usize, assistant_message_id: MessageId, @@ -478,13 +451,14 @@ pub struct XmlTag { #[derive(Copy, Clone, Debug, strum::EnumString, PartialEq, Eq, strum::AsRefStr)] #[strum(serialize_all = "snake_case")] pub enum XmlTagKind { - Step, + Patch, + Title, Edit, Path, - Search, - Within, - Operation, Description, + OldText, + NewText, + Operation, } pub struct Context { @@ -500,8 +474,7 @@ pub struct Context { slash_command_output_sections: Vec>, pending_tool_uses_by_id: HashMap, PendingToolUse>, message_anchors: Vec, - images: HashMap, Shared>>)>, - image_anchors: Vec, + contents: Vec, messages_metadata: HashMap, summary: Option, pending_summary: Task>, @@ -515,7 +488,7 @@ pub struct Context { _subscriptions: Vec, telemetry: Option>, language_registry: Arc, - workflow_steps: Vec, + patches: Vec, xml_tags: Vec, project: Option>, prompt_builder: Arc, @@ -531,7 +504,7 @@ impl ContextAnnotation for PendingSlashCommand { } } -impl ContextAnnotation for WorkflowStep { +impl ContextAnnotation for AssistantPatch { fn range(&self) -> &Range { &self.range } @@ -577,7 +550,7 @@ impl Context { cx: &mut ModelContext, ) -> Self { let buffer = cx.new_model(|_cx| { - let mut buffer = Buffer::remote( + let buffer = Buffer::remote( language::BufferId::new(1).unwrap(), replica_id, capability, @@ -595,8 +568,7 @@ impl Context { pending_ops: Vec::new(), operations: Vec::new(), message_anchors: Default::default(), - image_anchors: Default::default(), - images: Default::default(), + contents: Default::default(), messages_metadata: Default::default(), pending_slash_commands: Vec::new(), finished_slash_commands: HashSet::default(), @@ -617,7 +589,7 @@ impl Context { telemetry, project, language_registry, - workflow_steps: Vec::new(), + patches: Vec::new(), xml_tags: Vec::new(), prompt_builder, }; @@ -659,11 +631,6 @@ impl Context { id: message.id, start: message.offset_range.start, metadata: self.messages_metadata[&message.id].clone(), - image_offsets: message - .image_offsets - .iter() - .map(|image_offset| (image_offset.0, image_offset.1.image_id)) - .collect(), }) .collect(), summary: self @@ -675,12 +642,13 @@ impl Context { .slash_command_output_sections .iter() .filter_map(|section| { - let range = section.range.to_offset(buffer); - if section.range.start.is_valid(buffer) && !range.is_empty() { + if section.is_valid(buffer) { + let range = section.range.to_offset(buffer); Some(assistant_slash_command::SlashCommandOutputSection { range, icon: section.icon, label: section.label.clone(), + metadata: section.metadata.clone(), }) } else { None @@ -716,7 +684,7 @@ impl Context { buffer.set_text(saved_context.text.as_str(), cx) }); let 
operations = saved_context.into_ops(&this.buffer, cx); - this.apply_ops(operations, cx).unwrap(); + this.apply_ops(operations, cx); this } @@ -789,7 +757,7 @@ impl Context { &mut self, ops: impl IntoIterator, cx: &mut ModelContext, - ) -> Result<()> { + ) { let mut buffer_ops = Vec::new(); for op in ops { match op { @@ -798,10 +766,8 @@ impl Context { } } self.buffer - .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?; + .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx)); self.flush_ops(cx); - - Ok(()) } fn flush_ops(&mut self, cx: &mut ModelContext) { @@ -961,48 +927,49 @@ impl Context { self.summary.as_ref() } - pub(crate) fn workflow_step_containing( + pub(crate) fn patch_containing( &self, - offset: usize, + position: Point, cx: &AppContext, - ) -> Option<&WorkflowStep> { + ) -> Option<&AssistantPatch> { let buffer = self.buffer.read(cx); - let index = self - .workflow_steps - .binary_search_by(|step| { - let step_range = step.range.to_offset(&buffer); - if offset < step_range.start { - Ordering::Greater - } else if offset > step_range.end { - Ordering::Less - } else { - Ordering::Equal - } - }) - .ok()?; - Some(&self.workflow_steps[index]) + let index = self.patches.binary_search_by(|patch| { + let patch_range = patch.range.to_point(&buffer); + if position < patch_range.start { + Ordering::Greater + } else if position > patch_range.end { + Ordering::Less + } else { + Ordering::Equal + } + }); + if let Ok(ix) = index { + Some(&self.patches[ix]) + } else { + None + } } - pub fn workflow_step_ranges(&self) -> impl Iterator> + '_ { - self.workflow_steps.iter().map(|step| step.range.clone()) + pub fn patch_ranges(&self) -> impl Iterator> + '_ { + self.patches.iter().map(|patch| patch.range.clone()) } - pub(crate) fn workflow_step_for_range( + pub(crate) fn patch_for_range( &self, range: &Range, cx: &AppContext, - ) -> Option<&WorkflowStep> { + ) -> Option<&AssistantPatch> { let buffer = self.buffer.read(cx); - let index = self.workflow_step_index_for_range(range, buffer).ok()?; - Some(&self.workflow_steps[index]) + let index = self.patch_index_for_range(range, buffer).ok()?; + Some(&self.patches[index]) } - fn workflow_step_index_for_range( + fn patch_index_for_range( &self, tagged_range: &Range, buffer: &text::BufferSnapshot, ) -> Result { - self.workflow_steps + self.patches .binary_search_by(|probe| probe.range.cmp(&tagged_range, buffer)) } @@ -1037,18 +1004,19 @@ impl Context { fn handle_buffer_event( &mut self, _: Model, - event: &language::Event, + event: &language::BufferEvent, cx: &mut ModelContext, ) { match event { - language::Event::Operation(operation) => cx.emit(ContextEvent::Operation( - ContextOperation::BufferOperation(operation.clone()), - )), - language::Event::Edited => { + language::BufferEvent::Operation { + operation, + is_local: true, + } => cx.emit(ContextEvent::Operation(ContextOperation::BufferOperation( + operation.clone(), + ))), + language::BufferEvent::Edited => { self.count_remaining_tokens(cx); self.reparse(cx); - // Use `inclusive = true` to invalidate a step when an edit occurs - // at the start/end of a parsed step. 
cx.emit(ContextEvent::MessagesEdited); } _ => {} @@ -1277,8 +1245,8 @@ impl Context { let mut removed_slash_command_ranges = Vec::new(); let mut updated_slash_commands = Vec::new(); - let mut removed_steps = Vec::new(); - let mut updated_steps = Vec::new(); + let mut removed_patches = Vec::new(); + let mut updated_patches = Vec::new(); while let Some(mut row_range) = row_ranges.next() { while let Some(next_row_range) = row_ranges.peek() { if row_range.end >= next_row_range.start { @@ -1302,11 +1270,11 @@ impl Context { &mut removed_slash_command_ranges, cx, ); - self.reparse_workflow_steps_in_range( + self.reparse_patches_in_range( start..end, &buffer, - &mut updated_steps, - &mut removed_steps, + &mut updated_patches, + &mut removed_patches, cx, ); } @@ -1318,10 +1286,10 @@ impl Context { }); } - if !updated_steps.is_empty() || !removed_steps.is_empty() { - cx.emit(ContextEvent::WorkflowStepsUpdated { - removed: removed_steps, - updated: updated_steps, + if !updated_patches.is_empty() || !removed_patches.is_empty() { + cx.emit(ContextEvent::PatchesUpdated { + removed: removed_patches, + updated: updated_patches, }); } } @@ -1383,7 +1351,7 @@ impl Context { removed.extend(removed_commands.map(|command| command.source_range)); } - fn reparse_workflow_steps_in_range( + fn reparse_patches_in_range( &mut self, range: Range, buffer: &BufferSnapshot, @@ -1398,41 +1366,32 @@ impl Context { self.xml_tags .splice(intersecting_tags_range.clone(), new_tags); - // Find which steps intersect the changed range. - let intersecting_steps_range = - self.indices_intersecting_buffer_range(&self.workflow_steps, range.clone(), cx); + // Find which patches intersect the changed range. + let intersecting_patches_range = + self.indices_intersecting_buffer_range(&self.patches, range.clone(), cx); - // Reparse all tags after the last unchanged step before the change. + // Reparse all tags after the last unchanged patch before the change. let mut tags_start_ix = 0; - if let Some(preceding_unchanged_step) = - self.workflow_steps[..intersecting_steps_range.start].last() + if let Some(preceding_unchanged_patch) = + self.patches[..intersecting_patches_range.start].last() { tags_start_ix = match self.xml_tags.binary_search_by(|tag| { tag.range .start - .cmp(&preceding_unchanged_step.range.end, buffer) + .cmp(&preceding_unchanged_patch.range.end, buffer) .then(Ordering::Less) }) { Ok(ix) | Err(ix) => ix, }; } - // Rebuild the edit suggestions in the range. - let mut new_steps = self.parse_steps(tags_start_ix, range.end, buffer); - - if let Some(project) = self.project() { - for step in &mut new_steps { - Self::resolve_workflow_step_internal(step, &project, cx); - } - } - - updated.extend(new_steps.iter().map(|step| step.range.clone())); - let removed_steps = self - .workflow_steps - .splice(intersecting_steps_range, new_steps); + // Rebuild the patches in the range. 
+ let new_patches = self.parse_patches(tags_start_ix, range.end, buffer, cx); + updated.extend(new_patches.iter().map(|patch| patch.range.clone())); + let removed_patches = self.patches.splice(intersecting_patches_range, new_patches); removed.extend( - removed_steps - .map(|step| step.range) + removed_patches + .map(|patch| patch.range) .filter(|range| !updated.contains(&range)), ); } @@ -1493,60 +1452,95 @@ impl Context { tags } - fn parse_steps( + fn parse_patches( &mut self, tags_start_ix: usize, buffer_end: text::Anchor, buffer: &BufferSnapshot, - ) -> Vec { - let mut new_steps = Vec::new(); - let mut pending_step = None; - let mut edit_step_depth = 0; + cx: &AppContext, + ) -> Vec { + let mut new_patches = Vec::new(); + let mut pending_patch = None; + let mut patch_tag_depth = 0; let mut tags = self.xml_tags[tags_start_ix..].iter().peekable(); 'tags: while let Some(tag) = tags.next() { - if tag.range.start.cmp(&buffer_end, buffer).is_gt() && edit_step_depth == 0 { + if tag.range.start.cmp(&buffer_end, buffer).is_gt() && patch_tag_depth == 0 { break; } - if tag.kind == XmlTagKind::Step && tag.is_open_tag { - edit_step_depth += 1; - let edit_start = tag.range.start; - let mut edits = Vec::new(); - let mut step = WorkflowStep { - range: edit_start..edit_start, - leading_tags_end: tag.range.end, - trailing_tag_start: None, + if tag.kind == XmlTagKind::Patch && tag.is_open_tag { + patch_tag_depth += 1; + let patch_start = tag.range.start; + let mut edits = Vec::>::new(); + let mut patch = AssistantPatch { + range: patch_start..patch_start, + title: String::new().into(), edits: Default::default(), - resolution: None, - resolution_task: None, + status: crate::AssistantPatchStatus::Pending, }; while let Some(tag) = tags.next() { - step.trailing_tag_start.get_or_insert(tag.range.start); - - if tag.kind == XmlTagKind::Step && !tag.is_open_tag { - // step.trailing_tag_start = Some(tag.range.start); - edit_step_depth -= 1; - if edit_step_depth == 0 { - step.range.end = tag.range.end; - step.edits = edits.into(); - new_steps.push(step); + if tag.kind == XmlTagKind::Patch && !tag.is_open_tag { + patch_tag_depth -= 1; + if patch_tag_depth == 0 { + patch.range.end = tag.range.end; + + // Include the line immediately after this tag if it's empty. 
+ let patch_end_offset = patch.range.end.to_offset(buffer); + let mut patch_end_chars = buffer.chars_at(patch_end_offset); + if patch_end_chars.next() == Some('\n') + && patch_end_chars.next().map_or(true, |ch| ch == '\n') + { + let messages = self.messages_for_offsets( + [patch_end_offset, patch_end_offset + 1], + cx, + ); + if messages.len() == 1 { + patch.range.end = buffer.anchor_before(patch_end_offset + 1); + } + } + + edits.sort_unstable_by(|a, b| { + if let (Ok(a), Ok(b)) = (a, b) { + a.path.cmp(&b.path) + } else { + Ordering::Equal + } + }); + patch.edits = edits.into(); + patch.status = AssistantPatchStatus::Ready; + new_patches.push(patch); continue 'tags; } } + if tag.kind == XmlTagKind::Title && tag.is_open_tag { + let content_start = tag.range.end; + while let Some(tag) = tags.next() { + if tag.kind == XmlTagKind::Title && !tag.is_open_tag { + let content_end = tag.range.start; + patch.title = + trimmed_text_in_range(buffer, content_start..content_end) + .into(); + break; + } + } + } + if tag.kind == XmlTagKind::Edit && tag.is_open_tag { let mut path = None; - let mut search = None; + let mut old_text = None; + let mut new_text = None; let mut operation = None; let mut description = None; while let Some(tag) = tags.next() { if tag.kind == XmlTagKind::Edit && !tag.is_open_tag { - edits.push(WorkflowStepEdit::new( + edits.push(AssistantEdit::new( path, operation, - search, + old_text, + new_text, description, )); break; @@ -1555,7 +1549,8 @@ impl Context { if tag.is_open_tag && [ XmlTagKind::Path, - XmlTagKind::Search, + XmlTagKind::OldText, + XmlTagKind::NewText, XmlTagKind::Operation, XmlTagKind::Description, ] @@ -1567,15 +1562,18 @@ impl Context { if tag.kind == kind && !tag.is_open_tag { let tag = tags.next().unwrap(); let content_end = tag.range.start; - let mut content = buffer - .text_for_range(content_start..content_end) - .collect::(); - content.truncate(content.trim_end().len()); + let content = trimmed_text_in_range( + buffer, + content_start..content_end, + ); match kind { XmlTagKind::Path => path = Some(content), XmlTagKind::Operation => operation = Some(content), - XmlTagKind::Search => { - search = Some(content).filter(|s| !s.is_empty()) + XmlTagKind::OldText => { + old_text = Some(content).filter(|s| !s.is_empty()) + } + XmlTagKind::NewText => { + new_text = Some(content).filter(|s| !s.is_empty()) } XmlTagKind::Description => { description = @@ -1590,162 +1588,28 @@ impl Context { } } - pending_step = Some(step); - } - } - - if let Some(mut pending_step) = pending_step { - pending_step.range.end = text::Anchor::MAX; - new_steps.push(pending_step); - } - - new_steps - } - - pub fn resolve_workflow_step( - &mut self, - tagged_range: Range, - cx: &mut ModelContext, - ) -> Option<()> { - let index = self - .workflow_step_index_for_range(&tagged_range, self.buffer.read(cx)) - .ok()?; - let step = &mut self.workflow_steps[index]; - let project = self.project.as_ref()?; - step.resolution.take(); - Self::resolve_workflow_step_internal(step, project, cx); - None - } - - fn resolve_workflow_step_internal( - step: &mut WorkflowStep, - project: &Model, - cx: &mut ModelContext<'_, Context>, - ) { - step.resolution_task = Some(cx.spawn({ - let range = step.range.clone(); - let edits = step.edits.clone(); - let project = project.clone(); - |this, mut cx| async move { - let suggestion_groups = - Self::compute_step_resolution(project, edits, &mut cx).await; - - this.update(&mut cx, |this, cx| { - let buffer = this.buffer.read(cx).text_snapshot(); - let ix = 
this.workflow_step_index_for_range(&range, &buffer).ok(); - if let Some(ix) = ix { - let step = &mut this.workflow_steps[ix]; - - let resolution = suggestion_groups.map(|suggestion_groups| { - let mut title = String::new(); - for mut chunk in buffer.text_for_range( - step.leading_tags_end - ..step.trailing_tag_start.unwrap_or(step.range.end), - ) { - if title.is_empty() { - chunk = chunk.trim_start(); - } - if let Some((prefix, _)) = chunk.split_once('\n') { - title.push_str(prefix); - break; - } else { - title.push_str(chunk); - } - } - - WorkflowStepResolution { - title, - suggestion_groups, - } - }); - - step.resolution = Some(Arc::new(resolution)); - cx.emit(ContextEvent::WorkflowStepsUpdated { - removed: vec![], - updated: vec![range], - }) - } - }) - .ok(); + patch.edits = edits.into(); + pending_patch = Some(patch); } - })); - } - - async fn compute_step_resolution( - project: Model, - edits: Arc<[Result]>, - cx: &mut AsyncAppContext, - ) -> Result, Vec>> { - let mut suggestion_tasks = Vec::new(); - for edit in edits.iter() { - let edit = edit.as_ref().map_err(|e| anyhow!("{e}"))?; - suggestion_tasks.push(edit.resolve(project.clone(), cx.clone())); - } - - // Expand the context ranges of each suggestion and group suggestions with overlapping context ranges. - let suggestions = future::try_join_all(suggestion_tasks).await?; - - let mut suggestions_by_buffer = HashMap::default(); - for (buffer, suggestion) in suggestions { - suggestions_by_buffer - .entry(buffer) - .or_insert_with(Vec::new) - .push(suggestion); } - let mut suggestion_groups_by_buffer = HashMap::default(); - for (buffer, mut suggestions) in suggestions_by_buffer { - let mut suggestion_groups = Vec::::new(); - let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?; - // Sort suggestions by their range so that earlier, larger ranges come first - suggestions.sort_by(|a, b| a.range().cmp(&b.range(), &snapshot)); - - // Merge overlapping suggestions - suggestions.dedup_by(|a, b| b.try_merge(a, &snapshot)); - - // Create context ranges for each suggestion - for suggestion in suggestions { - let context_range = { - let suggestion_point_range = suggestion.range().to_point(&snapshot); - let start_row = suggestion_point_range.start.row.saturating_sub(5); - let end_row = - cmp::min(suggestion_point_range.end.row + 5, snapshot.max_point().row); - let start = snapshot.anchor_before(Point::new(start_row, 0)); - let end = - snapshot.anchor_after(Point::new(end_row, snapshot.line_len(end_row))); - start..end - }; - - if let Some(last_group) = suggestion_groups.last_mut() { - if last_group - .context_range - .end - .cmp(&context_range.start, &snapshot) - .is_ge() - { - // Merge with the previous group if context ranges overlap - last_group.context_range.end = context_range.end; - last_group.suggestions.push(suggestion); - } else { - // Create a new group - suggestion_groups.push(WorkflowSuggestionGroup { - context_range, - suggestions: vec![suggestion], - }); - } + if let Some(mut pending_patch) = pending_patch { + let patch_start = pending_patch.range.start.to_offset(buffer); + if let Some(message) = self.message_for_offset(patch_start, cx) { + if message.anchor_range.end == text::Anchor::MAX { + pending_patch.range.end = text::Anchor::MAX; } else { - // Create the first group - suggestion_groups.push(WorkflowSuggestionGroup { - context_range, - suggestions: vec![suggestion], - }); + let message_end = buffer.anchor_after(message.offset_range.end - 1); + pending_patch.range.end = message_end; } + } else { + 
pending_patch.range.end = text::Anchor::MAX; } - suggestion_groups_by_buffer.insert(buffer, suggestion_groups); + new_patches.push(pending_patch); } - Ok(suggestion_groups_by_buffer) + new_patches } pub fn pending_command_for_position( @@ -1865,6 +1729,7 @@ impl Context { ..buffer.anchor_before(start + section.range.end), icon: section.icon, label: section.label, + metadata: section.metadata, }) .collect::>(); sections.sort_by(|a, b| a.range.cmp(&b.range, buffer)); @@ -1957,6 +1822,14 @@ impl Context { output_range }); + this.insert_content( + Content::ToolResult { + range: anchor_range.clone(), + tool_use_id: tool_use_id.clone(), + }, + cx, + ); + cx.emit(ContextEvent::ToolFinished { tool_use_id, output_range: anchor_range, @@ -1993,8 +1866,9 @@ impl Context { } pub fn assist(&mut self, cx: &mut ModelContext) -> Option { - let provider = LanguageModelRegistry::read_global(cx).active_provider()?; - let model = LanguageModelRegistry::read_global(cx).active_model()?; + let model_registry = LanguageModelRegistry::read_global(cx); + let provider = model_registry.active_provider()?; + let model = model_registry.active_model()?; let last_message_id = self.get_last_valid_message_id(cx)?; if !provider.is_authenticated(cx) { @@ -2038,6 +1912,7 @@ impl Context { let stream_completion = async { let request_start = Instant::now(); let mut events = stream.await?; + let mut stop_reason = StopReason::EndTurn; while let Some(event) = events.next().await { if response_latency.is_none() { @@ -2050,7 +1925,7 @@ impl Context { .message_anchors .iter() .position(|message| message.id == assistant_message_id)?; - let event_to_emit = this.buffer.update(cx, |buffer, cx| { + this.buffer.update(cx, |buffer, cx| { let message_old_end_offset = this.message_anchors[message_ix + 1..] 
.iter() .find(|message| message.start.is_valid(buffer)) @@ -2059,13 +1934,9 @@ impl Context { }); match event { - LanguageModelCompletionEvent::Stop(reason) => match reason { - StopReason::ToolUse => { - return Some(ContextEvent::UsePendingTools); - } - StopReason::EndTurn => {} - StopReason::MaxTokens => {} - }, + LanguageModelCompletionEvent::Stop(reason) => { + stop_reason = reason; + } LanguageModelCompletionEvent::Text(chunk) => { buffer.edit( [( @@ -2116,14 +1987,9 @@ impl Context { ); } } - - None }); cx.emit(ContextEvent::StreamedCompletion); - if let Some(event) = event_to_emit { - cx.emit(event); - } Some(()) })?; @@ -2136,39 +2002,69 @@ impl Context { this.update_cache_status_for_completion(cx); })?; - anyhow::Ok(()) + anyhow::Ok(stop_reason) }; let result = stream_completion.await; this.update(&mut cx, |this, cx| { - let error_message = result - .err() - .map(|error| error.to_string().trim().to_string()); - - if let Some(error_message) = error_message.as_ref() { - cx.emit(ContextEvent::ShowAssistError(SharedString::from( - error_message.clone(), - ))); - } - - this.update_metadata(assistant_message_id, cx, |metadata| { - if let Some(error_message) = error_message.as_ref() { - metadata.status = - MessageStatus::Error(SharedString::from(error_message.clone())); + let error_message = if let Some(error) = result.as_ref().err() { + if error.is::() { + cx.emit(ContextEvent::ShowPaymentRequiredError); + this.update_metadata(assistant_message_id, cx, |metadata| { + metadata.status = MessageStatus::Canceled; + }); + Some(error.to_string()) + } else if error.is::() { + cx.emit(ContextEvent::ShowMaxMonthlySpendReachedError); + this.update_metadata(assistant_message_id, cx, |metadata| { + metadata.status = MessageStatus::Canceled; + }); + Some(error.to_string()) } else { - metadata.status = MessageStatus::Done; + let error_message = error.to_string().trim().to_string(); + cx.emit(ContextEvent::ShowAssistError(SharedString::from( + error_message.clone(), + ))); + this.update_metadata(assistant_message_id, cx, |metadata| { + metadata.status = + MessageStatus::Error(SharedString::from(error_message.clone())); + }); + Some(error_message) } - }); + } else { + this.update_metadata(assistant_message_id, cx, |metadata| { + metadata.status = MessageStatus::Done; + }); + None + }; if let Some(telemetry) = this.telemetry.as_ref() { - telemetry.report_assistant_event( - Some(this.id.0.clone()), - AssistantKind::Panel, - model.telemetry_id(), + let language_name = this + .buffer + .read(cx) + .language() + .map(|language| language.name()); + telemetry.report_assistant_event(AssistantEvent { + conversation_id: Some(this.id.0.clone()), + kind: AssistantKind::Panel, + phase: AssistantPhase::Response, + model: model.telemetry_id(), + model_provider: model.provider_id().to_string(), response_latency, error_message, - ); + language_name: language_name.map(|name| name.to_proto()), + }); + } + + if let Ok(stop_reason) = result { + match stop_reason { + StopReason::ToolUse => { + cx.emit(ContextEvent::UsePendingTools); + } + StopReason::EndTurn => {} + StopReason::MaxTokens => {} + } } }) .ok(); @@ -2186,18 +2082,94 @@ impl Context { pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest { let buffer = self.buffer.read(cx); - let request_messages = self - .messages(cx) - .filter(|message| message.status == MessageStatus::Done) - .filter_map(|message| message.to_request_message(&buffer)) - .collect(); - LanguageModelRequest { - messages: request_messages, + let mut contents = 
self.contents(cx).peekable(); + + fn collect_text_content(buffer: &Buffer, range: Range) -> Option { + let text: String = buffer.text_for_range(range.clone()).collect(); + if text.trim().is_empty() { + None + } else { + Some(text) + } + } + + let mut completion_request = LanguageModelRequest { + messages: Vec::new(), tools: Vec::new(), stop: Vec::new(), - temperature: 1.0, + temperature: None, + }; + for message in self.messages(cx) { + if message.status != MessageStatus::Done { + continue; + } + + let mut offset = message.offset_range.start; + let mut request_message = LanguageModelRequestMessage { + role: message.role, + content: Vec::new(), + cache: message + .cache + .as_ref() + .map_or(false, |cache| cache.is_anchor), + }; + + while let Some(content) = contents.peek() { + if content + .range() + .end + .cmp(&message.anchor_range.end, buffer) + .is_lt() + { + let content = contents.next().unwrap(); + let range = content.range().to_offset(buffer); + request_message.content.extend( + collect_text_content(buffer, offset..range.start).map(MessageContent::Text), + ); + + match content { + Content::Image { image, .. } => { + if let Some(image) = image.clone().now_or_never().flatten() { + request_message + .content + .push(language_model::MessageContent::Image(image)); + } + } + Content::ToolUse { tool_use, .. } => { + request_message + .content + .push(language_model::MessageContent::ToolUse(tool_use.clone())); + } + Content::ToolResult { tool_use_id, .. } => { + request_message.content.push( + language_model::MessageContent::ToolResult( + LanguageModelToolResult { + tool_use_id: tool_use_id.to_string(), + is_error: false, + content: collect_text_content(buffer, range.clone()) + .unwrap_or_default(), + }, + ), + ); + } + } + + offset = range.end; + } else { + break; + } + } + + request_message.content.extend( + collect_text_content(buffer, offset..message.offset_range.end) + .map(MessageContent::Text), + ); + + completion_request.messages.push(request_message); } + + completion_request } pub fn cancel_last_assist(&mut self, cx: &mut ModelContext) -> bool { @@ -2236,11 +2208,11 @@ impl Context { let mut updated = Vec::new(); let mut removed = Vec::new(); for range in ranges { - self.reparse_workflow_steps_in_range(range, &buffer, &mut updated, &mut removed, cx); + self.reparse_patches_in_range(range, &buffer, &mut updated, &mut removed, cx); } if !updated.is_empty() || !removed.is_empty() { - cx.emit(ContextEvent::WorkflowStepsUpdated { removed, updated }) + cx.emit(ContextEvent::PatchesUpdated { removed, updated }) } } @@ -2324,53 +2296,31 @@ impl Context { } } - pub fn insert_image(&mut self, image: Image, cx: &mut ModelContext) -> Option<()> { - if let hash_map::Entry::Vacant(entry) = self.images.entry(image.id()) { - entry.insert(( - image.to_image_data(cx).log_err()?, - LanguageModelImage::from_image(image, cx).shared(), - )); - } - - Some(()) - } - - pub fn insert_image_anchor( - &mut self, - image_id: u64, - anchor: language::Anchor, - cx: &mut ModelContext, - ) -> bool { - cx.emit(ContextEvent::MessagesEdited); - + pub fn insert_content(&mut self, content: Content, cx: &mut ModelContext) { let buffer = self.buffer.read(cx); let insertion_ix = match self - .image_anchors - .binary_search_by(|existing_anchor| anchor.cmp(&existing_anchor.anchor, buffer)) + .contents + .binary_search_by(|probe| probe.cmp(&content, buffer)) { - Ok(ix) => ix, + Ok(ix) => { + self.contents.remove(ix); + ix + } Err(ix) => ix, }; - - if let Some((render_image, image)) = self.images.get(&image_id) { - 
self.image_anchors.insert( - insertion_ix, - ImageAnchor { - anchor, - image_id, - image: image.clone(), - render_image: render_image.clone(), - }, - ); - - true - } else { - false - } + self.contents.insert(insertion_ix, content); + cx.emit(ContextEvent::MessagesEdited); } - pub fn images<'a>(&'a self, _cx: &'a AppContext) -> impl 'a + Iterator { - self.image_anchors.iter().cloned() + pub fn contents<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator { + let buffer = self.buffer.read(cx); + self.contents + .iter() + .filter(|content| { + let range = content.range(); + range.start.is_valid(buffer) && range.end.is_valid(buffer) + }) + .cloned() } pub fn split_message( @@ -2533,22 +2483,14 @@ impl Context { return; } - let messages = self - .messages(cx) - .filter_map(|message| message.to_request_message(self.buffer.read(cx))) - .chain(Some(LanguageModelRequestMessage { - role: Role::User, - content: vec![ - "Summarize the context into a short title without punctuation.".into(), - ], - cache: false, - })); - let request = LanguageModelRequest { - messages: messages.collect(), - tools: Vec::new(), - stop: Vec::new(), - temperature: 1.0, - }; + let mut request = self.to_completion_request(cx); + request.messages.push(LanguageModelRequestMessage { + role: Role::User, + content: vec![ + "Summarize the context into a short title without punctuation.".into(), + ], + cache: false, + }); self.pending_summary = cx.spawn(|this, mut cx| { async move { @@ -2648,10 +2590,8 @@ impl Context { cx: &'a AppContext, ) -> impl 'a + Iterator { let buffer = self.buffer.read(cx); - let messages = message_anchors.enumerate(); - let images = self.image_anchors.iter(); - Self::messages_from_iters(buffer, &self.messages_metadata, messages, images) + Self::messages_from_iters(buffer, &self.messages_metadata, message_anchors.enumerate()) } pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator { @@ -2662,10 +2602,8 @@ impl Context { buffer: &'a Buffer, metadata: &'a HashMap, messages: impl Iterator + 'a, - images: impl Iterator + 'a, ) -> impl 'a + Iterator { let mut messages = messages.peekable(); - let mut images = images.peekable(); iter::from_fn(move || { if let Some((start_ix, message_anchor)) = messages.next() { @@ -2686,22 +2624,6 @@ impl Context { let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX); let message_end = message_end_anchor.to_offset(buffer); - let mut image_offsets = SmallVec::new(); - while let Some(image_anchor) = images.peek() { - if image_anchor.anchor.cmp(&message_end_anchor, buffer).is_lt() { - image_offsets.push(( - image_anchor.anchor.to_offset(buffer), - MessageImage { - image_id: image_anchor.image_id, - image: image_anchor.image.clone(), - }, - )); - images.next(); - } else { - break; - } - } - return Some(Message { index_range: start_ix..end_ix, offset_range: message_start..message_end, @@ -2710,7 +2632,6 @@ impl Context { role: metadata.role, status: metadata.status.clone(), cache: metadata.cache.clone(), - image_offsets, }); } None @@ -2748,9 +2669,6 @@ impl Context { })?; if let Some(summary) = summary { - this.read_with(&cx, |this, cx| this.serialize_images(fs.clone(), cx))? 
- .await; - let context = this.read_with(&cx, |this, cx| this.serialize(cx))?; let mut discriminant = 1; let mut new_path; @@ -2790,45 +2708,6 @@ impl Context { }); } - pub fn serialize_images(&self, fs: Arc, cx: &AppContext) -> Task<()> { - let mut images_to_save = self - .images - .iter() - .map(|(id, (_, llm_image))| { - let fs = fs.clone(); - let llm_image = llm_image.clone(); - let id = *id; - async move { - if let Some(llm_image) = llm_image.await { - let path: PathBuf = - context_images_dir().join(&format!("{}.png.base64", id)); - if fs - .metadata(path.as_path()) - .await - .log_err() - .flatten() - .is_none() - { - fs.atomic_write(path, llm_image.source.to_string()) - .await - .log_err(); - } - } - } - }) - .collect::>(); - cx.background_executor().spawn(async move { - if fs - .create_dir(context_images_dir().as_ref()) - .await - .log_err() - .is_some() - { - while let Some(_) = images_to_save.next().await {} - } - }) - } - pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext) { let timestamp = self.next_timestamp(); let summary = self.summary.get_or_insert(ContextSummary::default()); @@ -2839,6 +2718,24 @@ impl Context { } } +fn trimmed_text_in_range(buffer: &BufferSnapshot, range: Range) -> String { + let mut is_start = true; + let mut content = buffer + .text_for_range(range) + .map(|mut chunk| { + if is_start { + chunk = chunk.trim_start_matches('\n'); + if !chunk.is_empty() { + is_start = false; + } + } + chunk + }) + .collect::(); + content.truncate(content.trim_end().len()); + content +} + #[derive(Debug, Default)] pub struct ContextVersion { context: clock::Global, @@ -2914,9 +2811,6 @@ pub struct SavedMessage { pub id: MessageId, pub start: usize, pub metadata: MessageMetadata, - #[serde(default)] - // This is defaulted for backwards compatibility with JSON files created before August 2024. We didn't always have this field. 
- pub image_offsets: Vec<(usize, u64)>, } #[derive(Serialize, Deserialize)] @@ -3026,6 +2920,7 @@ impl SavedContext { ..buffer.anchor_before(section.range.end), icon: section.icon, label: section.label, + metadata: section.metadata, } }) .collect(), @@ -3102,7 +2997,6 @@ impl SavedContextV0_3_0 { timestamp, cache: None, }, - image_offsets: Vec::new(), }) }) .collect(), diff --git a/crates/assistant/src/context/context_tests.rs b/crates/assistant/src/context/context_tests.rs index c851ca7438f5b0..a11cfc375d21a4 100644 --- a/crates/assistant/src/context/context_tests.rs +++ b/crates/assistant/src/context/context_tests.rs @@ -1,8 +1,7 @@ -use super::{MessageCacheMetadata, WorkflowStepEdit}; +use super::{AssistantEdit, MessageCacheMetadata}; use crate::{ - assistant_panel, prompt_library, slash_command::file_command, CacheStatus, Context, - ContextEvent, ContextId, ContextOperation, MessageId, MessageStatus, PromptBuilder, - WorkflowStepEditKind, + assistant_panel, prompt_library, slash_command::file_command, AssistantEditKind, CacheStatus, + Context, ContextEvent, ContextId, ContextOperation, MessageId, MessageStatus, PromptBuilder, }; use anyhow::Result; use assistant_slash_command::{ @@ -12,9 +11,10 @@ use assistant_slash_command::{ use collections::HashSet; use fs::FakeFs; use gpui::{AppContext, Model, SharedString, Task, TestAppContext, WeakView}; -use language::{Buffer, LanguageRegistry, LspAdapterDelegate}; +use language::{Buffer, BufferSnapshot, LanguageRegistry, LspAdapterDelegate}; use language_model::{LanguageModelCacheConfiguration, LanguageModelRegistry, Role}; use parking_lot::Mutex; +use pretty_assertions::assert_eq; use project::Project; use rand::prelude::*; use serde_json::json; @@ -478,7 +478,15 @@ async fn test_slash_commands(cx: &mut TestAppContext) { #[gpui::test] async fn test_workflow_step_parsing(cx: &mut TestAppContext) { cx.update(prompt_library::init); - let settings_store = cx.update(SettingsStore::test); + let mut settings_store = cx.update(SettingsStore::test); + cx.update(|cx| { + settings_store + .set_user_settings( + r#"{ "assistant": { "enable_experimental_live_diffs": true } }"#, + cx, + ) + .unwrap() + }); cx.set_global(settings_store); cx.update(language::init); cx.update(Project::init_settings); @@ -520,7 +528,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) { »", cx, ); - expect_steps( + expect_patches( &context, " @@ -539,17 +547,17 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) { one two « - - Add a second function - - ```rust - fn two() {} - ``` - + »", cx, ); - expect_steps( + expect_patches( &context, " one two - « - Add a second function - - ```rust - fn two() {} - ``` - + « »", &[&[]], cx, ); - // The full suggestion is added + // The full patch is added edit( &context, " @@ -600,51 +596,46 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) { one two - - Add a second function - - ```rust - fn two() {} - ``` - + « + add a `two` function src/lib.rs insert_after - fn one - add a `two` function + fn one + + fn two() {} + - + also,»", cx, ); - expect_steps( + expect_patches( &context, " one two - « - Add a second function - - ```rust - fn two() {} - ``` - + « + add a `two` function src/lib.rs insert_after - fn one - add a `two` function + fn one + + fn two() {} + - » - + + » also,", - &[&[WorkflowStepEdit { + &[&[AssistantEdit { path: "src/lib.rs".into(), - kind: WorkflowStepEditKind::InsertAfter { - search: "fn one".into(), + kind: AssistantEditKind::InsertAfter { + old_text: "fn one".into(), + new_text: 
"fn two() {}".into(), description: "add a `two` function".into(), }, }]], @@ -659,51 +650,46 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) { one two - - Add a second function - - ```rust - fn two() {} - ``` - + + add a `two` function src/lib.rs insert_after - «fn zero» - add a `two` function + «fn zero» + + fn two() {} + - + also,", cx, ); - expect_steps( + expect_patches( &context, " one two - « - Add a second function - - ```rust - fn two() {} - ``` - + « + add a `two` function src/lib.rs insert_after - fn zero - add a `two` function + fn zero + + fn two() {} + - » - + + » also,", - &[&[WorkflowStepEdit { + &[&[AssistantEdit { path: "src/lib.rs".into(), - kind: WorkflowStepEditKind::InsertAfter { - search: "fn zero".into(), + kind: AssistantEditKind::InsertAfter { + old_text: "fn zero".into(), + new_text: "fn two() {}".into(), description: "add a `two` function".into(), }, }]], @@ -715,27 +701,24 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) { context.cycle_message_roles(HashSet::from_iter([assistant_message_id]), cx); context.cycle_message_roles(HashSet::from_iter([assistant_message_id]), cx); }); - expect_steps( + expect_patches( &context, " one two - - Add a second function - - ```rust - fn two() {} - ``` - + + add a `two` function src/lib.rs insert_after - fn zero - add a `two` function + fn zero + + fn two() {} + - + also,", &[], @@ -746,33 +729,31 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) { context.update(cx, |context, cx| { context.cycle_message_roles(HashSet::from_iter([assistant_message_id]), cx); }); - expect_steps( + expect_patches( &context, " one two - « - Add a second function - - ```rust - fn two() {} - ``` - + « + add a `two` function src/lib.rs insert_after - fn zero - add a `two` function + fn zero + + fn two() {} + - » - + + » also,", - &[&[WorkflowStepEdit { + &[&[AssistantEdit { path: "src/lib.rs".into(), - kind: WorkflowStepEditKind::InsertAfter { - search: "fn zero".into(), + kind: AssistantEditKind::InsertAfter { + old_text: "fn zero".into(), + new_text: "fn two() {}".into(), description: "add a `two` function".into(), }, }]], @@ -792,33 +773,31 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) { cx, ) }); - expect_steps( + expect_patches( &deserialized_context, " one two - « - Add a second function - - ```rust - fn two() {} - ``` - + « + add a `two` function src/lib.rs insert_after - fn zero - add a `two` function + fn zero + + fn two() {} + - » - + + » also,", - &[&[WorkflowStepEdit { + &[&[AssistantEdit { path: "src/lib.rs".into(), - kind: WorkflowStepEditKind::InsertAfter { - search: "fn zero".into(), + kind: AssistantEditKind::InsertAfter { + old_text: "fn zero".into(), + new_text: "fn two() {}".into(), description: "add a `two` function".into(), }, }]], @@ -834,48 +813,58 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) { cx.executor().run_until_parked(); } - fn expect_steps( + #[track_caller] + fn expect_patches( context: &Model, expected_marked_text: &str, - expected_suggestions: &[&[WorkflowStepEdit]], + expected_suggestions: &[&[AssistantEdit]], cx: &mut TestAppContext, ) { - context.update(cx, |context, cx| { - let expected_marked_text = expected_marked_text.unindent(); - let (expected_text, expected_ranges) = marked_text_ranges(&expected_marked_text, false); + let expected_marked_text = expected_marked_text.unindent(); + let (expected_text, _) = marked_text_ranges(&expected_marked_text, false); + + let (buffer_text, ranges, patches) = context.update(cx, |context, cx| 
{ context.buffer.read_with(cx, |buffer, _| { - assert_eq!(buffer.text(), expected_text); let ranges = context - .workflow_steps + .patches .iter() .map(|entry| entry.range.to_offset(buffer)) .collect::>(); - let marked = generate_marked_text(&expected_text, &ranges, false); - assert_eq!( - marked, - expected_marked_text, - "unexpected suggestion ranges. actual: {ranges:?}, expected: {expected_ranges:?}" - ); - let suggestions = context - .workflow_steps - .iter() - .map(|step| { - step.edits - .iter() - .map(|edit| { - let edit = edit.as_ref().unwrap(); - WorkflowStepEdit { - path: edit.path.clone(), - kind: edit.kind.clone(), - } - }) - .collect::>() - }) - .collect::>(); - - assert_eq!(suggestions, expected_suggestions); - }); + ( + buffer.text(), + ranges, + context + .patches + .iter() + .map(|step| step.edits.clone()) + .collect::>(), + ) + }) }); + + assert_eq!(buffer_text, expected_text); + + let actual_marked_text = generate_marked_text(&expected_text, &ranges, false); + assert_eq!(actual_marked_text, expected_marked_text); + + assert_eq!( + patches + .iter() + .map(|patch| { + patch + .iter() + .map(|edit| { + let edit = edit.as_ref().unwrap(); + AssistantEdit { + path: edit.path.clone(), + kind: edit.kind.clone(), + } + }) + .collect::>() + }) + .collect::>(), + expected_suggestions + ); } } @@ -1089,6 +1078,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std range: section_start..section_end, icon: ui::IconName::Ai, label: "section".into(), + metadata: None, }); } @@ -1165,9 +1155,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std ); network.lock().broadcast(replica_id, ops_to_send); - context - .update(cx, |context, cx| context.apply_ops(ops_to_receive, cx)) - .unwrap(); + context.update(cx, |context, cx| context.apply_ops(ops_to_receive, cx)); } else if rng.gen_bool(0.1) && replica_id != 0 { log::info!("Context {}: disconnecting", context_index); network.lock().disconnect_peer(replica_id); @@ -1179,9 +1167,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std .map(ContextOperation::from_proto) .collect::>>() .unwrap(); - context - .update(cx, |context, cx| context.apply_ops(ops, cx)) - .unwrap(); + context.update(cx, |context, cx| context.apply_ops(ops, cx)); } } } @@ -1425,6 +1411,8 @@ impl SlashCommand for FakeSlashCommand { fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, _cx: &mut WindowContext, diff --git a/crates/assistant/src/context_store.rs b/crates/assistant/src/context_store.rs index b6e1650c41dbf8..f4f03dda377ba7 100644 --- a/crates/assistant/src/context_store.rs +++ b/crates/assistant/src/context_store.rs @@ -2,7 +2,6 @@ use crate::{ prompts::PromptBuilder, Context, ContextEvent, ContextId, ContextOperation, ContextVersion, SavedContext, SavedContextMetadata, }; -use ::proto::AnyProtoClient; use anyhow::{anyhow, Context as _, Result}; use client::{proto, telemetry::Telemetry, Client, TypedEnvelope}; use clock::ReplicaId; @@ -16,6 +15,7 @@ use language::LanguageRegistry; use paths::contexts_dir; use project::Project; use regex::Regex; +use rpc::AnyProtoClient; use std::{ cmp::Reverse, ffi::OsStr, @@ -223,7 +223,7 @@ impl ContextStore { if let Some(context) = this.loaded_context_for_id(&context_id, cx) { let operation_proto = envelope.payload.operation.context("invalid operation")?; let operation = 
ContextOperation::from_proto(operation_proto)?; - context.update(cx, |context, cx| context.apply_ops([operation], cx))?; + context.update(cx, |context, cx| context.apply_ops([operation], cx)); } Ok(()) })? @@ -357,9 +357,6 @@ impl ContextStore { let Some(project_id) = project.remote_id() else { return Task::ready(Err(anyhow!("project was not remote"))); }; - if project.is_local_or_ssh() { - return Task::ready(Err(anyhow!("cannot create remote contexts as the host"))); - } let replica_id = project.replica_id(); let capability = project.capability(); @@ -394,7 +391,7 @@ impl ContextStore { .collect::>>() }) .await?; - context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??; + context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?; this.update(&mut cx, |this, cx| { if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) { existing_context @@ -488,9 +485,6 @@ impl ContextStore { let Some(project_id) = project.remote_id() else { return Task::ready(Err(anyhow!("project was not remote"))); }; - if project.is_local_or_ssh() { - return Task::ready(Err(anyhow!("cannot open remote contexts as the host"))); - } if let Some(context) = self.loaded_context_for_id(&context_id, cx) { return Task::ready(Ok(context)); @@ -531,7 +525,7 @@ impl ContextStore { .collect::>>() }) .await?; - context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??; + context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?; this.update(&mut cx, |this, cx| { if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) { existing_context diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index bfd85d2525aaae..a11d4113d87a91 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1,7 +1,7 @@ use crate::{ assistant_settings::AssistantSettings, humanize_token_count, prompts::PromptBuilder, - AssistantPanel, AssistantPanelEvent, CharOperation, LineDiff, LineOperation, ModelSelector, - StreamingDiff, + AssistantPanel, AssistantPanelEvent, CharOperation, CycleNextInlineAssist, + CyclePreviousInlineAssist, LineDiff, LineOperation, ModelSelector, StreamingDiff, }; use anyhow::{anyhow, Context as _, Result}; use client::{telemetry::Telemetry, ErrorExt}; @@ -12,8 +12,9 @@ use editor::{ BlockContext, BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, RenderBlock, ToDisplayPoint, }, - Anchor, AnchorRangeExt, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle, - ExcerptRange, GutterDimensions, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint, + Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorElement, EditorEvent, EditorMode, + EditorStyle, ExcerptId, ExcerptRange, GutterDimensions, MultiBuffer, MultiBufferSnapshot, + ToOffset as _, ToPoint, }; use feature_flags::{FeatureFlagAppExt as _, ZedPro}; use fs::Fs; @@ -25,34 +26,37 @@ use futures::{ SinkExt, Stream, StreamExt, }; use gpui::{ - anchored, deferred, point, AppContext, ClickEvent, EventEmitter, FocusHandle, FocusableView, - FontWeight, Global, HighlightStyle, Model, ModelContext, Subscription, Task, TextStyle, - UpdateGlobal, View, ViewContext, WeakView, WindowContext, + anchored, deferred, point, AnyElement, AppContext, ClickEvent, EventEmitter, FocusHandle, + FocusableView, FontWeight, Global, HighlightStyle, Model, ModelContext, Subscription, Task, + TextStyle, UpdateGlobal, View, ViewContext, WeakView, WindowContext, }; use language::{Buffer, IndentKind, 
Point, Selection, TransactionId}; use language_model::{ - LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role, + LanguageModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role, }; use multi_buffer::MultiBufferRow; use parking_lot::Mutex; +use project::{CodeAction, ProjectTransaction}; use rope::Rope; use settings::{Settings, SettingsStore}; use smol::future::FutureExt; use std::{ cmp, future::{self, Future}, - mem, + iter, mem, ops::{Range, RangeInclusive}, pin::Pin, sync::Arc, task::{self, Poll}, time::{Duration, Instant}, }; +use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase}; use terminal_view::terminal_panel::TerminalPanel; +use text::{OffsetRangeExt, ToPoint as _}; use theme::ThemeSettings; use ui::{prelude::*, CheckboxWithLabel, IconButtonShape, Popover, Tooltip}; use util::{RangeExt, ResultExt}; -use workspace::{notifications::NotificationId, Toast, Workspace}; +use workspace::{notifications::NotificationId, ItemHandle, Toast, Workspace}; pub fn init( fs: Arc, @@ -78,33 +82,13 @@ pub struct InlineAssistant { assists: HashMap, assists_by_editor: HashMap, EditorInlineAssists>, assist_groups: HashMap, - assist_observations: HashMap< - InlineAssistId, - ( - async_watch::Sender, - async_watch::Receiver, - ), - >, - confirmed_assists: HashMap>, + confirmed_assists: HashMap>, prompt_history: VecDeque, prompt_builder: Arc, telemetry: Option>, fs: Arc, } -pub enum AssistStatus { - Idle, - Started, - Stopped, - Finished, -} - -impl AssistStatus { - pub fn is_done(&self) -> bool { - matches!(self, Self::Stopped | Self::Finished) - } -} - impl Global for InlineAssistant {} impl InlineAssistant { @@ -119,7 +103,6 @@ impl InlineAssistant { assists: HashMap::default(), assists_by_editor: HashMap::default(), assist_groups: HashMap::default(), - assist_observations: HashMap::default(), confirmed_assists: HashMap::default(), prompt_history: VecDeque::default(), prompt_builder, @@ -129,8 +112,10 @@ impl InlineAssistant { } pub fn register_workspace(&mut self, workspace: &View, cx: &mut WindowContext) { - cx.subscribe(workspace, |_, event, cx| { - Self::update_global(cx, |this, cx| this.handle_workspace_event(event, cx)); + cx.subscribe(workspace, |workspace, event, cx| { + Self::update_global(cx, |this, cx| { + this.handle_workspace_event(workspace, event, cx) + }); }) .detach(); @@ -150,19 +135,49 @@ impl InlineAssistant { .detach(); } - fn handle_workspace_event(&mut self, event: &workspace::Event, cx: &mut WindowContext) { - // When the user manually saves an editor, automatically accepts all finished transformations. - if let workspace::Event::UserSavedItem { item, .. } = event { - if let Some(editor) = item.upgrade().and_then(|item| item.act_as::(cx)) { - if let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) { - for assist_id in editor_assists.assist_ids.clone() { - let assist = &self.assists[&assist_id]; - if let CodegenStatus::Done = &assist.codegen.read(cx).status { - self.finish_assist(assist_id, false, cx) + fn handle_workspace_event( + &mut self, + workspace: View, + event: &workspace::Event, + cx: &mut WindowContext, + ) { + match event { + workspace::Event::UserSavedItem { item, .. } => { + // When the user manually saves an editor, automatically accepts all finished transformations. 
+ if let Some(editor) = item.upgrade().and_then(|item| item.act_as::(cx)) { + if let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) { + for assist_id in editor_assists.assist_ids.clone() { + let assist = &self.assists[&assist_id]; + if let CodegenStatus::Done = assist.codegen.read(cx).status(cx) { + self.finish_assist(assist_id, false, cx) + } } } } } + workspace::Event::ItemAdded { item } => { + self.register_workspace_item(&workspace, item.as_ref(), cx); + } + _ => (), + } + } + + fn register_workspace_item( + &mut self, + workspace: &View, + item: &dyn ItemHandle, + cx: &mut WindowContext, + ) { + if let Some(editor) = item.act_as::(cx) { + editor.update(cx, |editor, cx| { + editor.push_code_action_provider( + Arc::new(AssistantCodeActionProvider { + editor: cx.view().downgrade(), + workspace: workspace.downgrade(), + }), + cx, + ); + }); } } @@ -220,6 +235,21 @@ impl InlineAssistant { text_anchor: buffer.anchor_after(buffer_range.end), }; codegen_ranges.push(start..end); + + if let Some(telemetry) = self.telemetry.as_ref() { + if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() { + telemetry.report_assistant_event(AssistantEvent { + conversation_id: None, + kind: AssistantKind::Inline, + phase: AssistantPhase::Invoked, + model: model.telemetry_id(), + model_provider: model.provider_id().to_string(), + response_latency: None, + error_message: None, + language_name: buffer.language().map(|language| language.name().to_proto()), + }); + } + } } let assist_group_id = self.next_assist_group_id.post_inc(); @@ -320,6 +350,7 @@ impl InlineAssistant { mut range: Range, initial_prompt: String, initial_transaction_id: Option, + focus: bool, workspace: Option>, assistant_panel: Option<&View>, cx: &mut WindowContext, @@ -392,6 +423,11 @@ impl InlineAssistant { assist_group.assist_ids.push(assist_id); editor_assists.assist_ids.push(assist_id); self.assist_groups.insert(assist_group_id, assist_group); + + if focus { + self.focus_assist(assist_id, cx); + } + assist_id } @@ -541,7 +577,7 @@ impl InlineAssistant { let assist_range = assist.range.to_offset(&buffer); if assist_range.contains(&selection.start) && assist_range.contains(&selection.end) { - if matches!(assist.codegen.read(cx).status, CodegenStatus::Pending) { + if matches!(assist.codegen.read(cx).status(cx), CodegenStatus::Pending) { self.dismiss_assist(*assist_id, cx); } else { self.finish_assist(*assist_id, false, cx); @@ -659,7 +695,7 @@ impl InlineAssistant { for assist_id in editor_assists.assist_ids.clone() { let assist = &self.assists[&assist_id]; if matches!( - assist.codegen.read(cx).status, + assist.codegen.read(cx).status(cx), CodegenStatus::Error(_) | CodegenStatus::Done ) { let assist_range = assist.range.to_offset(&snapshot); @@ -709,6 +745,33 @@ impl InlineAssistant { pub fn finish_assist(&mut self, assist_id: InlineAssistId, undo: bool, cx: &mut WindowContext) { if let Some(assist) = self.assists.get(&assist_id) { + if let Some(telemetry) = self.telemetry.as_ref() { + if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() { + let language_name = assist.editor.upgrade().and_then(|editor| { + let multibuffer = editor.read(cx).buffer().read(cx); + let ranges = multibuffer.range_to_buffer_ranges(assist.range.clone(), cx); + ranges + .first() + .and_then(|(buffer, _, _)| buffer.read(cx).language()) + .map(|language| language.name()) + }); + telemetry.report_assistant_event(AssistantEvent { + conversation_id: None, + kind: AssistantKind::Inline, + phase: if undo { + 
AssistantPhase::Rejected + } else { + AssistantPhase::Accepted + }, + model: model.telemetry_id(), + model_provider: model.provider_id().to_string(), + response_latency: None, + error_message: None, + language_name: language_name.map(|name| name.to_proto()), + }); + } + } + let assist_group_id = assist.group_id; if self.assist_groups[&assist_group_id].linked { for assist_id in self.unlink_assist_group(assist_group_id, cx) { @@ -746,20 +809,11 @@ impl InlineAssistant { if undo { assist.codegen.update(cx, |codegen, cx| codegen.undo(cx)); } else { - self.confirmed_assists.insert(assist_id, assist.codegen); + let confirmed_alternative = assist.codegen.read(cx).active_alternative().clone(); + self.confirmed_assists + .insert(assist_id, confirmed_alternative); } } - - // Remove the assist from the status updates map - self.assist_observations.remove(&assist_id); - } - - pub fn undo_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) -> bool { - let Some(codegen) = self.confirmed_assists.remove(&assist_id) else { - return false; - }; - codegen.update(cx, |this, cx| this.undo(cx)); - true } fn dismiss_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) -> bool { @@ -950,18 +1004,9 @@ impl InlineAssistant { assist .codegen .update(cx, |codegen, cx| { - codegen.start( - assist.range.clone(), - user_prompt, - assistant_panel_context, - cx, - ) + codegen.start(user_prompt, assistant_panel_context, cx) }) .log_err(); - - if let Some((tx, _)) = self.assist_observations.get(&assist_id) { - tx.send(AssistStatus::Started).ok(); - } } pub fn stop_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) { @@ -972,25 +1017,6 @@ impl InlineAssistant { }; assist.codegen.update(cx, |codegen, cx| codegen.stop(cx)); - - if let Some((tx, _)) = self.assist_observations.get(&assist_id) { - tx.send(AssistStatus::Stopped).ok(); - } - } - - pub fn assist_status(&self, assist_id: InlineAssistId, cx: &AppContext) -> InlineAssistStatus { - if let Some(assist) = self.assists.get(&assist_id) { - match &assist.codegen.read(cx).status { - CodegenStatus::Idle => InlineAssistStatus::Idle, - CodegenStatus::Pending => InlineAssistStatus::Pending, - CodegenStatus::Done => InlineAssistStatus::Done, - CodegenStatus::Error(_) => InlineAssistStatus::Error, - } - } else if self.confirmed_assists.contains_key(&assist_id) { - InlineAssistStatus::Confirmed - } else { - InlineAssistStatus::Canceled - } } fn update_editor_highlights(&self, editor: &View, cx: &mut WindowContext) { @@ -1009,16 +1035,16 @@ impl InlineAssistant { for assist_id in assist_ids { if let Some(assist) = self.assists.get(assist_id) { let codegen = assist.codegen.read(cx); - let buffer = codegen.buffer.read(cx).read(cx); - foreground_ranges.extend(codegen.last_equal_ranges().iter().cloned()); + let buffer = codegen.buffer(cx).read(cx).read(cx); + foreground_ranges.extend(codegen.last_equal_ranges(cx).iter().cloned()); let pending_range = - codegen.edit_position.unwrap_or(assist.range.start)..assist.range.end; + codegen.edit_position(cx).unwrap_or(assist.range.start)..assist.range.end; if pending_range.end.to_offset(&buffer) > pending_range.start.to_offset(&buffer) { gutter_pending_ranges.push(pending_range); } - if let Some(edit_position) = codegen.edit_position { + if let Some(edit_position) = codegen.edit_position(cx) { let edited_range = assist.range.start..edit_position; if edited_range.end.to_offset(&buffer) > edited_range.start.to_offset(&buffer) { gutter_transformed_ranges.push(edited_range); @@ -1026,7 +1052,8 @@ 
impl InlineAssistant { } if assist.decorations.is_some() { - inserted_row_ranges.extend(codegen.diff.inserted_row_ranges.iter().cloned()); + inserted_row_ranges + .extend(codegen.diff(cx).inserted_row_ranges.iter().cloned()); } } } @@ -1075,7 +1102,7 @@ impl InlineAssistant { for row_range in inserted_row_ranges { editor.highlight_rows::( row_range, - Some(cx.theme().status().info_background), + cx.theme().status().info_background, false, cx, ); @@ -1097,9 +1124,9 @@ impl InlineAssistant { }; let codegen = assist.codegen.read(cx); - let old_snapshot = codegen.snapshot.clone(); - let old_buffer = codegen.old_buffer.clone(); - let deleted_row_ranges = codegen.diff.deleted_row_ranges.clone(); + let old_snapshot = codegen.snapshot(cx); + let old_buffer = codegen.old_buffer(cx); + let deleted_row_ranges = codegen.diff(cx).deleted_row_ranges.clone(); editor.update(cx, |editor, cx| { let old_blocks = mem::take(&mut decorations.removed_line_block_ids); @@ -1119,7 +1146,7 @@ impl InlineAssistant { let deleted_lines_editor = cx.new_view(|cx| { let multi_buffer = cx.new_model(|_| { - MultiBuffer::without_headers(0, language::Capability::ReadOnly) + MultiBuffer::without_headers(language::Capability::ReadOnly) }); multi_buffer.update(cx, |multi_buffer, cx| { multi_buffer.push_excerpts( @@ -1141,8 +1168,8 @@ impl InlineAssistant { editor.set_read_only(true); editor.set_show_inline_completions(Some(false), cx); editor.highlight_rows::( - Anchor::min()..=Anchor::max(), - Some(cx.theme().status().deleted_background), + Anchor::min()..Anchor::max(), + cx.theme().status().deleted_background, false, cx, ); @@ -1175,42 +1202,6 @@ impl InlineAssistant { .collect(); }) } - - pub fn observe_assist( - &mut self, - assist_id: InlineAssistId, - ) -> async_watch::Receiver { - if let Some((_, rx)) = self.assist_observations.get(&assist_id) { - rx.clone() - } else { - let (tx, rx) = async_watch::channel(AssistStatus::Idle); - self.assist_observations.insert(assist_id, (tx, rx.clone())); - rx - } - } -} - -pub enum InlineAssistStatus { - Idle, - Pending, - Done, - Error, - Confirmed, - Canceled, -} - -impl InlineAssistStatus { - pub(crate) fn is_pending(&self) -> bool { - matches!(self, Self::Pending) - } - - pub(crate) fn is_confirmed(&self) -> bool { - matches!(self, Self::Confirmed) - } - - pub(crate) fn is_done(&self) -> bool { - matches!(self, Self::Done) - } } struct EditorInlineAssists { @@ -1378,8 +1369,15 @@ impl EventEmitter for PromptEditor {} impl Render for PromptEditor { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { let gutter_dimensions = *self.gutter_dimensions.lock(); - let status = &self.codegen.read(cx).status; - let buttons = match status { + let codegen = self.codegen.read(cx); + + let mut buttons = Vec::new(); + if codegen.alternative_count(cx) > 1 { + buttons.push(self.render_cycle_controls(cx)); + } + + let status = codegen.status(cx); + buttons.extend(match status { CodegenStatus::Idle => { vec![ IconButton::new("cancel", IconName::Close) @@ -1388,14 +1386,16 @@ impl Render for PromptEditor { .tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx)) .on_click( cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)), - ), + ) + .into_any_element(), IconButton::new("start", IconName::SparkleAlt) .icon_color(Color::Muted) .shape(IconButtonShape::Square) .tooltip(|cx| Tooltip::for_action("Transform", &menu::Confirm, cx)) .on_click( cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::StartRequested)), - ), + ) + .into_any_element(), ] } CodegenStatus::Pending 
=> { @@ -1406,7 +1406,8 @@ impl Render for PromptEditor { .tooltip(|cx| Tooltip::text("Cancel Assist", cx)) .on_click( cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)), - ), + ) + .into_any_element(), IconButton::new("stop", IconName::Stop) .icon_color(Color::Error) .shape(IconButtonShape::Square) @@ -1418,9 +1419,8 @@ impl Render for PromptEditor { cx, ) }) - .on_click( - cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::StopRequested)), - ), + .on_click(cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::StopRequested))) + .into_any_element(), ] } CodegenStatus::Error(_) | CodegenStatus::Done => { @@ -1431,7 +1431,8 @@ impl Render for PromptEditor { .tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx)) .on_click( cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)), - ), + ) + .into_any_element(), if self.edited_since_done || matches!(status, CodegenStatus::Error(_)) { IconButton::new("restart", IconName::RotateCw) .icon_color(Color::Info) @@ -1447,6 +1448,7 @@ impl Render for PromptEditor { .on_click(cx.listener(|_, _, cx| { cx.emit(PromptEditorEvent::StartRequested); })) + .into_any_element() } else { IconButton::new("confirm", IconName::Check) .icon_color(Color::Info) @@ -1455,21 +1457,25 @@ impl Render for PromptEditor { .on_click(cx.listener(|_, _, cx| { cx.emit(PromptEditorEvent::ConfirmRequested); })) + .into_any_element() }, ] } - }; + }); h_flex() + .key_context("PromptEditor") .bg(cx.theme().colors().editor_background) .border_y_1() .border_color(cx.theme().status().info_border) .size_full() - .py(cx.line_height() / 2.) + .py(cx.line_height() / 2.5) .on_action(cx.listener(Self::confirm)) .on_action(cx.listener(Self::cancel)) .on_action(cx.listener(Self::move_up)) .on_action(cx.listener(Self::move_down)) + .capture_action(cx.listener(Self::cycle_prev)) + .capture_action(cx.listener(Self::cycle_next)) .child( h_flex() .w(gutter_dimensions.full_width() + (gutter_dimensions.margin / 2.0)) @@ -1478,7 +1484,7 @@ impl Render for PromptEditor { .child( ModelSelector::new( self.fs.clone(), - IconButton::new("context", IconName::SlidersAlt) + IconButton::new("context", IconName::SettingsAlt) .shape(IconButtonShape::Square) .icon_size(IconSize::Small) .icon_color(Color::Muted) @@ -1504,7 +1510,7 @@ impl Render for PromptEditor { ), ) .map(|el| { - let CodegenStatus::Error(error) = &self.codegen.read(cx).status else { + let CodegenStatus::Error(error) = self.codegen.read(cx).status(cx) else { return el; }; @@ -1748,7 +1754,7 @@ impl PromptEditor { } fn handle_codegen_changed(&mut self, _: Model, cx: &mut ViewContext) { - match &self.codegen.read(cx).status { + match self.codegen.read(cx).status(cx) { CodegenStatus::Idle => { self.editor .update(cx, |editor, _| editor.set_read_only(false)); @@ -1779,7 +1785,7 @@ impl PromptEditor { } fn cancel(&mut self, _: &editor::actions::Cancel, cx: &mut ViewContext) { - match &self.codegen.read(cx).status { + match self.codegen.read(cx).status(cx) { CodegenStatus::Idle | CodegenStatus::Done | CodegenStatus::Error(_) => { cx.emit(PromptEditorEvent::CancelRequested); } @@ -1790,7 +1796,7 @@ impl PromptEditor { } fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { - match &self.codegen.read(cx).status { + match self.codegen.read(cx).status(cx) { CodegenStatus::Idle => { cx.emit(PromptEditorEvent::StartRequested); } @@ -1850,6 +1856,79 @@ impl PromptEditor { } } + fn cycle_prev(&mut self, _: &CyclePreviousInlineAssist, cx: &mut ViewContext) { + self.codegen + .update(cx, |codegen, cx| 
codegen.cycle_prev(cx)); + } + + fn cycle_next(&mut self, _: &CycleNextInlineAssist, cx: &mut ViewContext) { + self.codegen + .update(cx, |codegen, cx| codegen.cycle_next(cx)); + } + + fn render_cycle_controls(&self, cx: &ViewContext) -> AnyElement { + let codegen = self.codegen.read(cx); + let disabled = matches!(codegen.status(cx), CodegenStatus::Idle); + + h_flex() + .child( + IconButton::new("previous", IconName::ChevronLeft) + .icon_color(Color::Muted) + .disabled(disabled) + .shape(IconButtonShape::Square) + .tooltip({ + let focus_handle = self.editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Previous Alternative", + &CyclePreviousInlineAssist, + &focus_handle, + cx, + ) + } + }) + .on_click(cx.listener(|this, _, cx| { + this.codegen + .update(cx, |codegen, cx| codegen.cycle_prev(cx)) + })), + ) + .child( + Label::new(format!( + "{}/{}", + codegen.active_alternative + 1, + codegen.alternative_count(cx) + )) + .size(LabelSize::Small) + .color(if disabled { + Color::Disabled + } else { + Color::Muted + }), + ) + .child( + IconButton::new("next", IconName::ChevronRight) + .icon_color(Color::Muted) + .disabled(disabled) + .shape(IconButtonShape::Square) + .tooltip({ + let focus_handle = self.editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Next Alternative", + &CycleNextInlineAssist, + &focus_handle, + cx, + ) + } + }) + .on_click(cx.listener(|this, _, cx| { + this.codegen + .update(cx, |codegen, cx| codegen.cycle_next(cx)) + })), + ) + .into_any_element() + } + fn render_token_count(&self, cx: &mut ViewContext) -> Option { let model = LanguageModelRegistry::read_global(cx).active_model()?; let token_counts = self.token_counts?; @@ -1918,12 +1997,11 @@ impl PromptEditor { } else { cx.theme().colors().text }, - font_family: settings.ui_font.family.clone(), - font_features: settings.ui_font.features.clone(), - font_fallbacks: settings.ui_font.fallbacks.clone(), - font_size: rems(0.875).into(), - font_weight: settings.ui_font.weight, - line_height: relative(1.3), + font_family: settings.buffer_font.family.clone(), + font_fallbacks: settings.buffer_font.fallbacks.clone(), + font_size: settings.buffer_font_size.into(), + font_weight: settings.buffer_font.weight, + line_height: relative(settings.buffer_line_height.value()), ..Default::default() }; EditorElement::new( @@ -2097,7 +2175,7 @@ impl InlineAssist { return; }; - if let CodegenStatus::Error(error) = &codegen.read(cx).status { + if let CodegenStatus::Error(error) = codegen.read(cx).status(cx) { if assist.decorations.is_none() { if let Some(workspace) = assist .workspace @@ -2109,7 +2187,7 @@ impl InlineAssist { struct InlineAssistantError; let id = - NotificationId::identified::( + NotificationId::composite::( assist_id.0, ); @@ -2121,8 +2199,6 @@ impl InlineAssist { if assist.decorations.is_none() { this.finish_assist(assist_id, false, cx); - } else if let Some(tx) = this.assist_observations.get(&assist_id) { - tx.0.send(AssistStatus::Finished).ok(); } } }) @@ -2158,12 +2234,9 @@ impl InlineAssist { return future::ready(Err(anyhow!("no user prompt"))).boxed(); }; let assistant_panel_context = self.assistant_panel_context(cx); - self.codegen.read(cx).count_tokens( - self.range.clone(), - user_prompt, - assistant_panel_context, - cx, - ) + self.codegen + .read(cx) + .count_tokens(user_prompt, assistant_panel_context, cx) } } @@ -2174,19 +2247,216 @@ struct InlineAssistDecorations { end_block_id: CustomBlockId, } -#[derive(Debug)] +#[derive(Copy, Clone, Debug)] pub enum CodegenEvent { Finished, 
Undone, } pub struct Codegen { + alternatives: Vec>, + active_alternative: usize, + subscriptions: Vec, + buffer: Model, + range: Range, + initial_transaction_id: Option, + telemetry: Option>, + builder: Arc, +} + +impl Codegen { + pub fn new( + buffer: Model, + range: Range, + initial_transaction_id: Option, + telemetry: Option>, + builder: Arc, + cx: &mut ModelContext, + ) -> Self { + let codegen = cx.new_model(|cx| { + CodegenAlternative::new( + buffer.clone(), + range.clone(), + false, + telemetry.clone(), + builder.clone(), + cx, + ) + }); + let mut this = Self { + alternatives: vec![codegen], + active_alternative: 0, + subscriptions: Vec::new(), + buffer, + range, + initial_transaction_id, + telemetry, + builder, + }; + this.activate(0, cx); + this + } + + fn subscribe_to_alternative(&mut self, cx: &mut ModelContext) { + let codegen = self.active_alternative().clone(); + self.subscriptions.clear(); + self.subscriptions + .push(cx.observe(&codegen, |_, _, cx| cx.notify())); + self.subscriptions + .push(cx.subscribe(&codegen, |_, _, event, cx| cx.emit(*event))); + } + + fn active_alternative(&self) -> &Model { + &self.alternatives[self.active_alternative] + } + + fn status<'a>(&self, cx: &'a AppContext) -> &'a CodegenStatus { + &self.active_alternative().read(cx).status + } + + fn alternative_count(&self, cx: &AppContext) -> usize { + LanguageModelRegistry::read_global(cx) + .inline_alternative_models() + .len() + + 1 + } + + pub fn cycle_prev(&mut self, cx: &mut ModelContext) { + let next_active_ix = if self.active_alternative == 0 { + self.alternatives.len() - 1 + } else { + self.active_alternative - 1 + }; + self.activate(next_active_ix, cx); + } + + pub fn cycle_next(&mut self, cx: &mut ModelContext) { + let next_active_ix = (self.active_alternative + 1) % self.alternatives.len(); + self.activate(next_active_ix, cx); + } + + fn activate(&mut self, index: usize, cx: &mut ModelContext) { + self.active_alternative() + .update(cx, |codegen, cx| codegen.set_active(false, cx)); + self.active_alternative = index; + self.active_alternative() + .update(cx, |codegen, cx| codegen.set_active(true, cx)); + self.subscribe_to_alternative(cx); + cx.notify(); + } + + pub fn start( + &mut self, + user_prompt: String, + assistant_panel_context: Option, + cx: &mut ModelContext, + ) -> Result<()> { + let alternative_models = LanguageModelRegistry::read_global(cx) + .inline_alternative_models() + .to_vec(); + + self.active_alternative() + .update(cx, |alternative, cx| alternative.undo(cx)); + self.activate(0, cx); + self.alternatives.truncate(1); + + for _ in 0..alternative_models.len() { + self.alternatives.push(cx.new_model(|cx| { + CodegenAlternative::new( + self.buffer.clone(), + self.range.clone(), + false, + self.telemetry.clone(), + self.builder.clone(), + cx, + ) + })); + } + + let primary_model = LanguageModelRegistry::read_global(cx) + .active_model() + .context("no active model")?; + + for (model, alternative) in iter::once(primary_model) + .chain(alternative_models) + .zip(&self.alternatives) + { + alternative.update(cx, |alternative, cx| { + alternative.start( + user_prompt.clone(), + assistant_panel_context.clone(), + model.clone(), + cx, + ) + })?; + } + + Ok(()) + } + + pub fn stop(&mut self, cx: &mut ModelContext) { + for codegen in &self.alternatives { + codegen.update(cx, |codegen, cx| codegen.stop(cx)); + } + } + + pub fn undo(&mut self, cx: &mut ModelContext) { + self.active_alternative() + .update(cx, |codegen, cx| codegen.undo(cx)); + + self.buffer.update(cx, |buffer, cx| { + if 
let Some(transaction_id) = self.initial_transaction_id.take() { + buffer.undo_transaction(transaction_id, cx); + buffer.refresh_preview(cx); + } + }); + } + + pub fn count_tokens( + &self, + user_prompt: String, + assistant_panel_context: Option, + cx: &AppContext, + ) -> BoxFuture<'static, Result> { + self.active_alternative() + .read(cx) + .count_tokens(user_prompt, assistant_panel_context, cx) + } + + pub fn buffer(&self, cx: &AppContext) -> Model { + self.active_alternative().read(cx).buffer.clone() + } + + pub fn old_buffer(&self, cx: &AppContext) -> Model { + self.active_alternative().read(cx).old_buffer.clone() + } + + pub fn snapshot(&self, cx: &AppContext) -> MultiBufferSnapshot { + self.active_alternative().read(cx).snapshot.clone() + } + + pub fn edit_position(&self, cx: &AppContext) -> Option { + self.active_alternative().read(cx).edit_position + } + + fn diff<'a>(&self, cx: &'a AppContext) -> &'a Diff { + &self.active_alternative().read(cx).diff + } + + pub fn last_equal_ranges<'a>(&self, cx: &'a AppContext) -> &'a [Range] { + self.active_alternative().read(cx).last_equal_ranges() + } +} + +impl EventEmitter for Codegen {} + +pub struct CodegenAlternative { buffer: Model, old_buffer: Model, snapshot: MultiBufferSnapshot, edit_position: Option, + range: Range, last_equal_ranges: Vec>, - initial_transaction_id: Option, transformation_transaction_id: Option, status: CodegenStatus, generation: Task<()>, @@ -2194,6 +2464,9 @@ pub struct Codegen { telemetry: Option>, _subscription: gpui::Subscription, builder: Arc, + active: bool, + edits: Vec<(Range, String)>, + line_operations: Vec, } enum CodegenStatus { @@ -2206,7 +2479,7 @@ enum CodegenStatus { #[derive(Default)] struct Diff { deleted_row_ranges: Vec<(Anchor, RangeInclusive)>, - inserted_row_ranges: Vec>, + inserted_row_ranges: Vec>, } impl Diff { @@ -2215,13 +2488,13 @@ impl Diff { } } -impl EventEmitter for Codegen {} +impl EventEmitter for CodegenAlternative {} -impl Codegen { +impl CodegenAlternative { pub fn new( buffer: Model, range: Range, - initial_transaction_id: Option, + active: bool, telemetry: Option>, builder: Arc, cx: &mut ModelContext, @@ -2260,8 +2533,33 @@ impl Codegen { diff: Diff::default(), telemetry, _subscription: cx.subscribe(&buffer, Self::handle_buffer_event), - initial_transaction_id, builder, + active, + edits: Vec::new(), + line_operations: Vec::new(), + range, + } + } + + fn set_active(&mut self, active: bool, cx: &mut ModelContext) { + if active != self.active { + self.active = active; + + if self.active { + let edits = self.edits.clone(); + self.apply_edits(edits, cx); + if matches!(self.status, CodegenStatus::Pending) { + let line_operations = self.line_operations.clone(); + self.reapply_line_based_diff(line_operations, cx); + } else { + self.reapply_batch_diff(cx).detach(); + } + } else if let Some(transaction_id) = self.transformation_transaction_id.take() { + self.buffer.update(cx, |buffer, cx| { + buffer.undo_transaction(transaction_id, cx); + buffer.forget_transaction(transaction_id, cx); + }); + } } } @@ -2286,14 +2584,12 @@ impl Codegen { pub fn count_tokens( &self, - edit_range: Range, user_prompt: String, assistant_panel_context: Option, cx: &AppContext, ) -> BoxFuture<'static, Result> { if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() { - let request = - self.build_request(user_prompt, assistant_panel_context.clone(), edit_range, cx); + let request = self.build_request(user_prompt, assistant_panel_context.clone(), cx); match request { Ok(request) => { let 
total_count = model.count_tokens(request.clone(), cx); @@ -2318,39 +2614,32 @@ impl Codegen { pub fn start( &mut self, - edit_range: Range, user_prompt: String, assistant_panel_context: Option, + model: Arc, cx: &mut ModelContext, ) -> Result<()> { - let model = LanguageModelRegistry::read_global(cx) - .active_model() - .context("no active model")?; - if let Some(transformation_transaction_id) = self.transformation_transaction_id.take() { self.buffer.update(cx, |buffer, cx| { buffer.undo_transaction(transformation_transaction_id, cx); }); } - self.edit_position = Some(edit_range.start.bias_right(&self.snapshot)); + self.edit_position = Some(self.range.start.bias_right(&self.snapshot)); let telemetry_id = model.telemetry_id(); - let chunks: LocalBoxFuture>>> = if user_prompt - .trim() - .to_lowercase() - == "delete" - { - async { Ok(stream::empty().boxed()) }.boxed_local() - } else { - let request = - self.build_request(user_prompt, assistant_panel_context, edit_range.clone(), cx)?; + let provider_id = model.provider_id(); + let chunks: LocalBoxFuture>>> = + if user_prompt.trim().to_lowercase() == "delete" { + async { Ok(stream::empty().boxed()) }.boxed_local() + } else { + let request = self.build_request(user_prompt, assistant_panel_context, cx)?; - let chunks = - cx.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await }); - async move { Ok(chunks.await?.boxed()) }.boxed_local() - }; - self.handle_stream(telemetry_id, edit_range, chunks, cx); + let chunks = cx + .spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await }); + async move { Ok(chunks.await?.boxed()) }.boxed_local() + }; + self.handle_stream(telemetry_id, provider_id.to_string(), chunks, cx); Ok(()) } @@ -2358,11 +2647,10 @@ impl Codegen { &self, user_prompt: String, assistant_panel_context: Option, - edit_range: Range, cx: &AppContext, ) -> Result { let buffer = self.buffer.read(cx).snapshot(cx); - let language = buffer.language_at(edit_range.start); + let language = buffer.language_at(self.range.start); let language_name = if let Some(language) = language.as_ref() { if Arc::ptr_eq(language, &language::PLAIN_TEXT) { None @@ -2373,22 +2661,9 @@ impl Codegen { None }; - // Higher Temperature increases the randomness of model outputs. 
- // If Markdown or No Language is Known, increase the randomness for more creative output - // If Code, decrease temperature to get more deterministic outputs - let temperature = if let Some(language) = language_name.clone() { - if language.as_ref() == "Markdown" { - 1.0 - } else { - 0.5 - } - } else { - 1.0 - }; - - let language_name = language_name.as_deref(); - let start = buffer.point_to_buffer_offset(edit_range.start); - let end = buffer.point_to_buffer_offset(edit_range.end); + let language_name = language_name.as_ref(); + let start = buffer.point_to_buffer_offset(self.range.start); + let end = buffer.point_to_buffer_offset(self.range.end); let (buffer, range) = if let Some((start, end)) = start.zip(end) { let (start_buffer, start_buffer_offset) = start; let (end_buffer, end_buffer_offset) = end; @@ -2420,24 +2695,24 @@ impl Codegen { Ok(LanguageModelRequest { messages, tools: Vec::new(), - stop: vec!["|END|>".to_string()], - temperature, + stop: Vec::new(), + temperature: None, }) } pub fn handle_stream( &mut self, model_telemetry_id: String, - edit_range: Range, + model_provider_id: String, stream: impl 'static + Future>>>, cx: &mut ModelContext, ) { let snapshot = self.snapshot.clone(); let selected_text = snapshot - .text_for_range(edit_range.start..edit_range.end) + .text_for_range(self.range.start..self.range.end) .collect::(); - let selection_start = edit_range.start.to_point(&snapshot); + let selection_start = self.range.start.to_point(&snapshot); // Start with the indentation of the first line in the selection let mut suggested_line_indent = snapshot @@ -2448,7 +2723,7 @@ impl Codegen { // If the first line in the selection does not have indentation, check the following lines if suggested_line_indent.len == 0 && suggested_line_indent.kind == IndentKind::Space { - for row in selection_start.row..=edit_range.end.to_point(&snapshot).row { + for row in selection_start.row..=self.range.end.to_point(&snapshot).row { let line_indent = snapshot.indent_size_for_line(MultiBufferRow(row)); // Prefer tabs if a line in the selection uses tabs as indentation if line_indent.kind == IndentKind::Tab { @@ -2459,9 +2734,18 @@ impl Codegen { } let telemetry = self.telemetry.clone(); + let language_name = { + let multibuffer = self.buffer.read(cx); + let ranges = multibuffer.range_to_buffer_ranges(self.range.clone(), cx); + ranges + .first() + .and_then(|(buffer, _, _)| buffer.read(cx).language()) + .map(|language| language.name()) + }; + self.diff = Diff::default(); self.status = CodegenStatus::Pending; - let mut edit_start = edit_range.start.to_offset(&snapshot); + let mut edit_start = self.range.start.to_offset(&snapshot); self.generation = cx.spawn(|codegen, mut cx| { async move { let chunks = stream.await; @@ -2569,81 +2853,58 @@ impl Codegen { let error_message = result.as_ref().err().map(|error| error.to_string()); if let Some(telemetry) = telemetry { - telemetry.report_assistant_event( - None, - telemetry_events::AssistantKind::Inline, - model_telemetry_id, + telemetry.report_assistant_event(AssistantEvent { + conversation_id: None, + kind: AssistantKind::Inline, + phase: AssistantPhase::Response, + model: model_telemetry_id, + model_provider: model_provider_id.to_string(), response_latency, error_message, - ); + language_name: language_name.map(|name| name.to_proto()), + }); } result?; Ok(()) }); - while let Some((char_ops, line_diff)) = diff_rx.next().await { + while let Some((char_ops, line_ops)) = diff_rx.next().await { codegen.update(&mut cx, |codegen, cx| { 
codegen.last_equal_ranges.clear(); - let transaction = codegen.buffer.update(cx, |buffer, cx| { - // Avoid grouping assistant edits with user edits. - buffer.finalize_last_transaction(cx); - - buffer.start_transaction(cx); - buffer.edit( - char_ops - .into_iter() - .filter_map(|operation| match operation { - CharOperation::Insert { text } => { - let edit_start = snapshot.anchor_after(edit_start); - Some((edit_start..edit_start, text)) - } - CharOperation::Delete { bytes } => { - let edit_end = edit_start + bytes; - let edit_range = snapshot.anchor_after(edit_start) - ..snapshot.anchor_before(edit_end); - edit_start = edit_end; - Some((edit_range, String::new())) - } - CharOperation::Keep { bytes } => { - let edit_end = edit_start + bytes; - let edit_range = snapshot.anchor_after(edit_start) - ..snapshot.anchor_before(edit_end); - edit_start = edit_end; - codegen.last_equal_ranges.push(edit_range); - None - } - }), - None, - cx, - ); - codegen.edit_position = Some(snapshot.anchor_after(edit_start)); - - buffer.end_transaction(cx) - }); + let edits = char_ops + .into_iter() + .filter_map(|operation| match operation { + CharOperation::Insert { text } => { + let edit_start = snapshot.anchor_after(edit_start); + Some((edit_start..edit_start, text)) + } + CharOperation::Delete { bytes } => { + let edit_end = edit_start + bytes; + let edit_range = snapshot.anchor_after(edit_start) + ..snapshot.anchor_before(edit_end); + edit_start = edit_end; + Some((edit_range, String::new())) + } + CharOperation::Keep { bytes } => { + let edit_end = edit_start + bytes; + let edit_range = snapshot.anchor_after(edit_start) + ..snapshot.anchor_before(edit_end); + edit_start = edit_end; + codegen.last_equal_ranges.push(edit_range); + None + } + }) + .collect::>(); - if let Some(transaction) = transaction { - if let Some(first_transaction) = - codegen.transformation_transaction_id - { - // Group all assistant edits into the first transaction. - codegen.buffer.update(cx, |buffer, cx| { - buffer.merge_transactions( - transaction, - first_transaction, - cx, - ) - }); - } else { - codegen.transformation_transaction_id = Some(transaction); - codegen.buffer.update(cx, |buffer, cx| { - buffer.finalize_last_transaction(cx) - }); - } + if codegen.active { + codegen.apply_edits(edits.iter().cloned(), cx); + codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx); } - - codegen.reapply_line_based_diff(edit_range.clone(), line_diff, cx); + codegen.edits.extend(edits); + codegen.line_operations = line_ops; + codegen.edit_position = Some(snapshot.anchor_after(edit_start)); cx.notify(); })?; @@ -2652,9 +2913,8 @@ impl Codegen { // Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer. // That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff. // It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`. 
- let batch_diff_task = codegen.update(&mut cx, |codegen, cx| { - codegen.reapply_batch_diff(edit_range.clone(), cx) - })?; + let batch_diff_task = + codegen.update(&mut cx, |codegen, cx| codegen.reapply_batch_diff(cx))?; let (line_based_stream_diff, ()) = join!(line_based_stream_diff, batch_diff_task); line_based_stream_diff?; @@ -2698,24 +2958,45 @@ impl Codegen { buffer.undo_transaction(transaction_id, cx); buffer.refresh_preview(cx); } + }); + } - if let Some(transaction_id) = self.initial_transaction_id.take() { - buffer.undo_transaction(transaction_id, cx); - buffer.refresh_preview(cx); - } + fn apply_edits( + &mut self, + edits: impl IntoIterator, String)>, + cx: &mut ModelContext, + ) { + let transaction = self.buffer.update(cx, |buffer, cx| { + // Avoid grouping assistant edits with user edits. + buffer.finalize_last_transaction(cx); + buffer.start_transaction(cx); + buffer.edit(edits, None, cx); + buffer.end_transaction(cx) }); + + if let Some(transaction) = transaction { + if let Some(first_transaction) = self.transformation_transaction_id { + // Group all assistant edits into the first transaction. + self.buffer.update(cx, |buffer, cx| { + buffer.merge_transactions(transaction, first_transaction, cx) + }); + } else { + self.transformation_transaction_id = Some(transaction); + self.buffer + .update(cx, |buffer, cx| buffer.finalize_last_transaction(cx)); + } + } } fn reapply_line_based_diff( &mut self, - edit_range: Range, - line_operations: Vec, + line_operations: impl IntoIterator, cx: &mut ModelContext, ) { let old_snapshot = self.snapshot.clone(); - let old_range = edit_range.to_point(&old_snapshot); + let old_range = self.range.to_point(&old_snapshot); let new_snapshot = self.buffer.read(cx).snapshot(cx); - let new_range = edit_range.to_point(&new_snapshot); + let new_range = self.range.to_point(&new_snapshot); let mut old_row = old_range.start.row; let mut new_row = new_range.start.row; @@ -2757,7 +3038,7 @@ impl Codegen { new_end_row, new_snapshot.line_len(MultiBufferRow(new_end_row)), )); - self.diff.inserted_row_ranges.push(start..=end); + self.diff.inserted_row_ranges.push(start..end); new_row += lines; } } @@ -2766,15 +3047,11 @@ impl Codegen { } } - fn reapply_batch_diff( - &mut self, - edit_range: Range, - cx: &mut ModelContext, - ) -> Task<()> { + fn reapply_batch_diff(&mut self, cx: &mut ModelContext) -> Task<()> { let old_snapshot = self.snapshot.clone(); - let old_range = edit_range.to_point(&old_snapshot); + let old_range = self.range.to_point(&old_snapshot); let new_snapshot = self.buffer.read(cx).snapshot(cx); - let new_range = edit_range.to_point(&new_snapshot); + let new_range = self.range.to_point(&new_snapshot); cx.spawn(|codegen, mut cx| async move { let (deleted_row_ranges, inserted_row_ranges) = cx @@ -2839,7 +3116,7 @@ impl Codegen { new_end_row, new_snapshot.line_len(MultiBufferRow(new_end_row)), )); - inserted_row_ranges.push(start..=end); + inserted_row_ranges.push(start..end); new_row += line_count; } } @@ -2988,6 +3265,132 @@ where } } +struct AssistantCodeActionProvider { + editor: WeakView, + workspace: WeakView, +} + +impl CodeActionProvider for AssistantCodeActionProvider { + fn code_actions( + &self, + buffer: &Model, + range: Range, + cx: &mut WindowContext, + ) -> Task>> { + let snapshot = buffer.read(cx).snapshot(); + let mut range = range.to_point(&snapshot); + + // Expand the range to line boundaries. 
+ range.start.column = 0; + range.end.column = snapshot.line_len(range.end.row); + + let mut has_diagnostics = false; + for diagnostic in snapshot.diagnostics_in_range::<_, Point>(range.clone(), false) { + range.start = cmp::min(range.start, diagnostic.range.start); + range.end = cmp::max(range.end, diagnostic.range.end); + has_diagnostics = true; + } + if has_diagnostics { + if let Some(symbols_containing_start) = snapshot.symbols_containing(range.start, None) { + if let Some(symbol) = symbols_containing_start.last() { + range.start = cmp::min(range.start, symbol.range.start.to_point(&snapshot)); + range.end = cmp::max(range.end, symbol.range.end.to_point(&snapshot)); + } + } + + if let Some(symbols_containing_end) = snapshot.symbols_containing(range.end, None) { + if let Some(symbol) = symbols_containing_end.last() { + range.start = cmp::min(range.start, symbol.range.start.to_point(&snapshot)); + range.end = cmp::max(range.end, symbol.range.end.to_point(&snapshot)); + } + } + + Task::ready(Ok(vec![CodeAction { + server_id: language::LanguageServerId(0), + range: snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end), + lsp_action: lsp::CodeAction { + title: "Fix with Assistant".into(), + ..Default::default() + }, + }])) + } else { + Task::ready(Ok(Vec::new())) + } + } + + fn apply_code_action( + &self, + buffer: Model, + action: CodeAction, + excerpt_id: ExcerptId, + _push_to_history: bool, + cx: &mut WindowContext, + ) -> Task> { + let editor = self.editor.clone(); + let workspace = self.workspace.clone(); + cx.spawn(|mut cx| async move { + let editor = editor.upgrade().context("editor was released")?; + let range = editor + .update(&mut cx, |editor, cx| { + editor.buffer().update(cx, |multibuffer, cx| { + let buffer = buffer.read(cx); + let multibuffer_snapshot = multibuffer.read(cx); + + let old_context_range = + multibuffer_snapshot.context_range_for_excerpt(excerpt_id)?; + let mut new_context_range = old_context_range.clone(); + if action + .range + .start + .cmp(&old_context_range.start, buffer) + .is_lt() + { + new_context_range.start = action.range.start; + } + if action.range.end.cmp(&old_context_range.end, buffer).is_gt() { + new_context_range.end = action.range.end; + } + drop(multibuffer_snapshot); + + if new_context_range != old_context_range { + multibuffer.resize_excerpt(excerpt_id, new_context_range, cx); + } + + let multibuffer_snapshot = multibuffer.read(cx); + Some( + multibuffer_snapshot + .anchor_in_excerpt(excerpt_id, action.range.start)? + ..multibuffer_snapshot + .anchor_in_excerpt(excerpt_id, action.range.end)?, + ) + }) + })? 
+ .context("invalid range")?; + let assistant_panel = workspace.update(&mut cx, |workspace, cx| { + workspace + .panel::(cx) + .context("assistant panel was released") + })??; + + cx.update_global(|assistant: &mut InlineAssistant, cx| { + let assist_id = assistant.suggest_assist( + &editor, + range, + "Fix Diagnostics".into(), + None, + true, + Some(workspace), + Some(&assistant_panel), + cx, + ); + assistant.start_assist(assist_id, cx); + })?; + + Ok(ProjectTransaction::default()) + }) + } +} + fn prefixes(text: &str) -> impl Iterator { (0..text.len() - 1).map(|ix| &text[..ix + 1]) } @@ -3058,10 +3461,10 @@ mod tests { }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let codegen = cx.new_model(|cx| { - Codegen::new( + CodegenAlternative::new( buffer.clone(), range.clone(), - None, + true, None, prompt_builder, cx, @@ -3072,7 +3475,7 @@ mod tests { codegen.update(cx, |codegen, cx| { codegen.handle_stream( String::new(), - range, + String::new(), future::ready(Ok(chunks_rx.map(Ok).boxed())), cx, ) @@ -3130,10 +3533,10 @@ mod tests { }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let codegen = cx.new_model(|cx| { - Codegen::new( + CodegenAlternative::new( buffer.clone(), range.clone(), - None, + true, None, prompt_builder, cx, @@ -3144,7 +3547,7 @@ mod tests { codegen.update(cx, |codegen, cx| { codegen.handle_stream( String::new(), - range.clone(), + String::new(), future::ready(Ok(chunks_rx.map(Ok).boxed())), cx, ) @@ -3205,10 +3608,10 @@ mod tests { }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let codegen = cx.new_model(|cx| { - Codegen::new( + CodegenAlternative::new( buffer.clone(), range.clone(), - None, + true, None, prompt_builder, cx, @@ -3219,7 +3622,7 @@ mod tests { codegen.update(cx, |codegen, cx| { codegen.handle_stream( String::new(), - range.clone(), + String::new(), future::ready(Ok(chunks_rx.map(Ok).boxed())), cx, ) @@ -3279,10 +3682,10 @@ mod tests { }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let codegen = cx.new_model(|cx| { - Codegen::new( + CodegenAlternative::new( buffer.clone(), range.clone(), - None, + true, None, prompt_builder, cx, @@ -3293,7 +3696,7 @@ mod tests { codegen.update(cx, |codegen, cx| { codegen.handle_stream( String::new(), - range.clone(), + String::new(), future::ready(Ok(chunks_rx.map(Ok).boxed())), cx, ) @@ -3323,6 +3726,79 @@ mod tests { ); } + #[gpui::test] + async fn test_inactive_codegen_alternative(cx: &mut TestAppContext) { + cx.update(LanguageModelRegistry::test); + cx.set_global(cx.update(SettingsStore::test)); + cx.update(language_settings::init); + + let text = indoc! 
{" + fn main() { + let x = 0; + } + "}; + let buffer = + cx.new_model(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let range = buffer.read_with(cx, |buffer, cx| { + let snapshot = buffer.snapshot(cx); + snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(1, 14)) + }); + let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); + let codegen = cx.new_model(|cx| { + CodegenAlternative::new( + buffer.clone(), + range.clone(), + false, + None, + prompt_builder, + cx, + ) + }); + + let (chunks_tx, chunks_rx) = mpsc::unbounded(); + codegen.update(cx, |codegen, cx| { + codegen.handle_stream( + String::new(), + String::new(), + future::ready(Ok(chunks_rx.map(Ok).boxed())), + cx, + ) + }); + + chunks_tx + .unbounded_send("let mut x = 0;\nx += 1;".to_string()) + .unwrap(); + drop(chunks_tx); + cx.run_until_parked(); + + // The codegen is inactive, so the buffer doesn't get modified. + assert_eq!( + buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()), + text + ); + + // Activating the codegen applies the changes. + codegen.update(cx, |codegen, cx| codegen.set_active(true, cx)); + assert_eq!( + buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()), + indoc! {" + fn main() { + let mut x = 0; + x += 1; + } + "} + ); + + // Deactivating the codegen undoes the changes. + codegen.update(cx, |codegen, cx| codegen.set_active(false, cx)); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()), + text + ); + } + #[gpui::test] async fn test_strip_invalid_spans_from_codeblock() { assert_chunks("Lorem ipsum dolor", "Lorem ipsum dolor").await; @@ -3373,7 +3849,7 @@ mod tests { }, ..Default::default() }, - Some(tree_sitter_rust::language()), + Some(tree_sitter_rust::LANGUAGE.into()), ) .with_indents_query( r#" diff --git a/crates/assistant/src/patch.rs b/crates/assistant/src/patch.rs new file mode 100644 index 00000000000000..82c81d3b865505 --- /dev/null +++ b/crates/assistant/src/patch.rs @@ -0,0 +1,746 @@ +use anyhow::{anyhow, Context as _, Result}; +use collections::HashMap; +use editor::ProposedChangesEditor; +use futures::{future, TryFutureExt as _}; +use gpui::{AppContext, AsyncAppContext, Model, SharedString}; +use language::{AutoindentMode, Buffer, BufferSnapshot}; +use project::{Project, ProjectPath}; +use std::{cmp, ops::Range, path::Path, sync::Arc}; +use text::{AnchorRangeExt as _, Bias, OffsetRangeExt as _, Point}; + +#[derive(Clone, Debug)] +pub(crate) struct AssistantPatch { + pub range: Range, + pub title: SharedString, + pub edits: Arc<[Result]>, + pub status: AssistantPatchStatus, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub(crate) enum AssistantPatchStatus { + Pending, + Ready, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub(crate) struct AssistantEdit { + pub path: String, + pub kind: AssistantEditKind, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum AssistantEditKind { + Update { + old_text: String, + new_text: String, + description: String, + }, + Create { + new_text: String, + description: String, + }, + InsertBefore { + old_text: String, + new_text: String, + description: String, + }, + InsertAfter { + old_text: String, + new_text: String, + description: String, + }, + Delete { + old_text: String, + }, +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub(crate) struct ResolvedPatch { + pub edit_groups: HashMap, Vec>, + pub errors: Vec, +} + +#[derive(Clone, Debug, Eq, PartialEq)] 
+pub struct ResolvedEditGroup { + pub context_range: Range, + pub edits: Vec, +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct ResolvedEdit { + range: Range, + new_text: String, + description: Option, +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub(crate) struct AssistantPatchResolutionError { + pub edit_ix: usize, + pub message: String, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +enum SearchDirection { + Up, + Left, + Diagonal, +} + +// A measure of the currently quality of an in-progress fuzzy search. +// +// Uses 60 bits to store a numeric cost, and 4 bits to store the preceding +// operation in the search. +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] +struct SearchState { + score: u32, + direction: SearchDirection, +} + +impl SearchState { + fn new(score: u32, direction: SearchDirection) -> Self { + Self { score, direction } + } +} + +impl ResolvedPatch { + pub fn apply(&self, editor: &ProposedChangesEditor, cx: &mut AppContext) { + for (buffer, groups) in &self.edit_groups { + let branch = editor.branch_buffer_for_base(buffer).unwrap(); + Self::apply_edit_groups(groups, &branch, cx); + } + editor.recalculate_all_buffer_diffs(); + } + + fn apply_edit_groups( + groups: &Vec, + buffer: &Model, + cx: &mut AppContext, + ) { + let mut edits = Vec::new(); + for group in groups { + for suggestion in &group.edits { + edits.push((suggestion.range.clone(), suggestion.new_text.clone())); + } + } + buffer.update(cx, |buffer, cx| { + buffer.edit( + edits, + Some(AutoindentMode::Block { + original_indent_columns: Vec::new(), + }), + cx, + ); + }); + } +} + +impl ResolvedEdit { + pub fn try_merge(&mut self, other: &Self, buffer: &text::BufferSnapshot) -> bool { + let range = &self.range; + let other_range = &other.range; + + // Don't merge if we don't contain the other suggestion. 
+ if range.start.cmp(&other_range.start, buffer).is_gt() + || range.end.cmp(&other_range.end, buffer).is_lt() + { + return false; + } + + if let Some(description) = &mut self.description { + if let Some(other_description) = &other.description { + description.push('\n'); + description.push_str(other_description); + } + } + true + } +} + +impl AssistantEdit { + pub fn new( + path: Option, + operation: Option, + old_text: Option, + new_text: Option, + description: Option, + ) -> Result { + let path = path.ok_or_else(|| anyhow!("missing path"))?; + let operation = operation.ok_or_else(|| anyhow!("missing operation"))?; + + let kind = match operation.as_str() { + "update" => AssistantEditKind::Update { + old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?, + new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?, + description: description.ok_or_else(|| anyhow!("missing description"))?, + }, + "insert_before" => AssistantEditKind::InsertBefore { + old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?, + new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?, + description: description.ok_or_else(|| anyhow!("missing description"))?, + }, + "insert_after" => AssistantEditKind::InsertAfter { + old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?, + new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?, + description: description.ok_or_else(|| anyhow!("missing description"))?, + }, + "delete" => AssistantEditKind::Delete { + old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?, + }, + "create" => AssistantEditKind::Create { + description: description.ok_or_else(|| anyhow!("missing description"))?, + new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?, + }, + _ => Err(anyhow!("unknown operation {operation:?}"))?, + }; + + Ok(Self { path, kind }) + } + + pub async fn resolve( + &self, + project: Model, + mut cx: AsyncAppContext, + ) -> Result<(Model, ResolvedEdit)> { + let path = self.path.clone(); + let kind = self.kind.clone(); + let buffer = project + .update(&mut cx, |project, cx| { + let project_path = project + .find_project_path(Path::new(&path), cx) + .or_else(|| { + // If we couldn't find a project path for it, put it in the active worktree + // so that when we create the buffer, it can be saved. + let worktree = project + .active_entry() + .and_then(|entry_id| project.worktree_for_entry(entry_id, cx)) + .or_else(|| project.worktrees(cx).next())?; + let worktree = worktree.read(cx); + + Some(ProjectPath { + worktree_id: worktree.id(), + path: Arc::from(Path::new(&path)), + }) + }) + .with_context(|| format!("worktree not found for {:?}", path))?; + anyhow::Ok(project.open_buffer(project_path, cx)) + })?? 
+ .await?; + + let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?; + let suggestion = cx + .background_executor() + .spawn(async move { kind.resolve(&snapshot) }) + .await; + + Ok((buffer, suggestion)) + } +} + +impl AssistantEditKind { + fn resolve(self, snapshot: &BufferSnapshot) -> ResolvedEdit { + match self { + Self::Update { + old_text, + new_text, + description, + } => { + let range = Self::resolve_location(&snapshot, &old_text); + ResolvedEdit { + range, + new_text, + description: Some(description), + } + } + Self::Create { + new_text, + description, + } => ResolvedEdit { + range: text::Anchor::MIN..text::Anchor::MAX, + description: Some(description), + new_text, + }, + Self::InsertBefore { + old_text, + mut new_text, + description, + } => { + let range = Self::resolve_location(&snapshot, &old_text); + new_text.push('\n'); + ResolvedEdit { + range: range.start..range.start, + new_text, + description: Some(description), + } + } + Self::InsertAfter { + old_text, + mut new_text, + description, + } => { + let range = Self::resolve_location(&snapshot, &old_text); + new_text.insert(0, '\n'); + ResolvedEdit { + range: range.end..range.end, + new_text, + description: Some(description), + } + } + Self::Delete { old_text } => { + let range = Self::resolve_location(&snapshot, &old_text); + ResolvedEdit { + range, + new_text: String::new(), + description: None, + } + } + } + } + + fn resolve_location(buffer: &text::BufferSnapshot, search_query: &str) -> Range { + const INSERTION_COST: u32 = 3; + const WHITESPACE_INSERTION_COST: u32 = 1; + const DELETION_COST: u32 = 3; + const WHITESPACE_DELETION_COST: u32 = 1; + const EQUALITY_BONUS: u32 = 5; + + struct Matrix { + cols: usize, + data: Vec, + } + + impl Matrix { + fn new(rows: usize, cols: usize) -> Self { + Matrix { + cols, + data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols], + } + } + + fn get(&self, row: usize, col: usize) -> SearchState { + self.data[row * self.cols + col] + } + + fn set(&mut self, row: usize, col: usize, cost: SearchState) { + self.data[row * self.cols + col] = cost; + } + } + + let buffer_len = buffer.len(); + let query_len = search_query.len(); + let mut matrix = Matrix::new(query_len + 1, buffer_len + 1); + + for (row, query_byte) in search_query.bytes().enumerate() { + for (col, buffer_byte) in buffer.bytes_in_range(0..buffer.len()).flatten().enumerate() { + let deletion_cost = if query_byte.is_ascii_whitespace() { + WHITESPACE_DELETION_COST + } else { + DELETION_COST + }; + let insertion_cost = if buffer_byte.is_ascii_whitespace() { + WHITESPACE_INSERTION_COST + } else { + INSERTION_COST + }; + + let up = SearchState::new( + matrix.get(row, col + 1).score.saturating_sub(deletion_cost), + SearchDirection::Up, + ); + let left = SearchState::new( + matrix + .get(row + 1, col) + .score + .saturating_sub(insertion_cost), + SearchDirection::Left, + ); + let diagonal = SearchState::new( + if query_byte == *buffer_byte { + matrix.get(row, col).score.saturating_add(EQUALITY_BONUS) + } else { + matrix + .get(row, col) + .score + .saturating_sub(deletion_cost + insertion_cost) + }, + SearchDirection::Diagonal, + ); + matrix.set(row + 1, col + 1, up.max(left).max(diagonal)); + } + } + + // Traceback to find the best match + let mut best_buffer_end = buffer_len; + let mut best_score = 0; + for col in 1..=buffer_len { + let score = matrix.get(query_len, col).score; + if score > best_score { + best_score = score; + best_buffer_end = col; + } + } + + let mut query_ix = query_len; + let mut 
buffer_ix = best_buffer_end; + while query_ix > 0 && buffer_ix > 0 { + let current = matrix.get(query_ix, buffer_ix); + match current.direction { + SearchDirection::Diagonal => { + query_ix -= 1; + buffer_ix -= 1; + } + SearchDirection::Up => { + query_ix -= 1; + } + SearchDirection::Left => { + buffer_ix -= 1; + } + } + } + + let mut start = buffer.offset_to_point(buffer.clip_offset(buffer_ix, Bias::Left)); + start.column = 0; + let mut end = buffer.offset_to_point(buffer.clip_offset(best_buffer_end, Bias::Right)); + if end.column > 0 { + end.column = buffer.line_len(end.row); + } + + buffer.anchor_after(start)..buffer.anchor_before(end) + } +} + +impl AssistantPatch { + pub(crate) async fn resolve( + &self, + project: Model, + cx: &mut AsyncAppContext, + ) -> ResolvedPatch { + let mut resolve_tasks = Vec::new(); + for (ix, edit) in self.edits.iter().enumerate() { + if let Ok(edit) = edit.as_ref() { + resolve_tasks.push( + edit.resolve(project.clone(), cx.clone()) + .map_err(move |error| (ix, error)), + ); + } + } + + let edits = future::join_all(resolve_tasks).await; + let mut errors = Vec::new(); + let mut edits_by_buffer = HashMap::default(); + for entry in edits { + match entry { + Ok((buffer, edit)) => { + edits_by_buffer + .entry(buffer) + .or_insert_with(Vec::new) + .push(edit); + } + Err((edit_ix, error)) => errors.push(AssistantPatchResolutionError { + edit_ix, + message: error.to_string(), + }), + } + } + + // Expand the context ranges of each edit and group edits with overlapping context ranges. + let mut edit_groups_by_buffer = HashMap::default(); + for (buffer, edits) in edits_by_buffer { + if let Ok(snapshot) = buffer.update(cx, |buffer, _| buffer.text_snapshot()) { + edit_groups_by_buffer.insert(buffer, Self::group_edits(edits, &snapshot)); + } + } + + ResolvedPatch { + edit_groups: edit_groups_by_buffer, + errors, + } + } + + fn group_edits( + mut edits: Vec, + snapshot: &text::BufferSnapshot, + ) -> Vec { + let mut edit_groups = Vec::::new(); + // Sort edits by their range so that earlier, larger ranges come first + edits.sort_by(|a, b| a.range.cmp(&b.range, &snapshot)); + + // Merge overlapping edits + edits.dedup_by(|a, b| b.try_merge(a, &snapshot)); + + // Create context ranges for each edit + for edit in edits { + let context_range = { + let edit_point_range = edit.range.to_point(&snapshot); + let start_row = edit_point_range.start.row.saturating_sub(5); + let end_row = cmp::min(edit_point_range.end.row + 5, snapshot.max_point().row); + let start = snapshot.anchor_before(Point::new(start_row, 0)); + let end = snapshot.anchor_after(Point::new(end_row, snapshot.line_len(end_row))); + start..end + }; + + if let Some(last_group) = edit_groups.last_mut() { + if last_group + .context_range + .end + .cmp(&context_range.start, &snapshot) + .is_ge() + { + // Merge with the previous group if context ranges overlap + last_group.context_range.end = context_range.end; + last_group.edits.push(edit); + } else { + // Create a new group + edit_groups.push(ResolvedEditGroup { + context_range, + edits: vec![edit], + }); + } + } else { + // Create the first group + edit_groups.push(ResolvedEditGroup { + context_range, + edits: vec![edit], + }); + } + } + + edit_groups + } + + pub fn path_count(&self) -> usize { + self.paths().count() + } + + pub fn paths(&self) -> impl '_ + Iterator { + let mut prev_path = None; + self.edits.iter().filter_map(move |edit| { + if let Ok(edit) = edit { + let path = Some(edit.path.as_str()); + if path != prev_path { + prev_path = path; + return path; + } + } 
+ None + }) + } +} + +impl PartialEq for AssistantPatch { + fn eq(&self, other: &Self) -> bool { + self.range == other.range + && self.title == other.title + && Arc::ptr_eq(&self.edits, &other.edits) + } +} + +impl Eq for AssistantPatch {} + +#[cfg(test)] +mod tests { + use super::*; + use gpui::{AppContext, Context}; + use language::{ + language_settings::AllLanguageSettings, Language, LanguageConfig, LanguageMatcher, + }; + use settings::SettingsStore; + use text::{OffsetRangeExt, Point}; + use ui::BorrowAppContext; + use unindent::Unindent as _; + + #[gpui::test] + fn test_resolve_location(cx: &mut AppContext) { + { + let buffer = cx.new_model(|cx| { + Buffer::local( + concat!( + " Lorem\n", + " ipsum\n", + " dolor sit amet\n", + " consecteur", + ), + cx, + ) + }); + let snapshot = buffer.read(cx).snapshot(); + assert_eq!( + AssistantEditKind::resolve_location(&snapshot, "ipsum\ndolor").to_point(&snapshot), + Point::new(1, 0)..Point::new(2, 18) + ); + } + + { + let buffer = cx.new_model(|cx| { + Buffer::local( + concat!( + "fn foo1(a: usize) -> usize {\n", + " 40\n", + "}\n", + "\n", + "fn foo2(b: usize) -> usize {\n", + " 42\n", + "}\n", + ), + cx, + ) + }); + let snapshot = buffer.read(cx).snapshot(); + assert_eq!( + AssistantEditKind::resolve_location(&snapshot, "fn foo1(b: usize) {\n40\n}") + .to_point(&snapshot), + Point::new(0, 0)..Point::new(2, 1) + ); + } + + { + let buffer = cx.new_model(|cx| { + Buffer::local( + concat!( + "fn main() {\n", + " Foo\n", + " .bar()\n", + " .baz()\n", + " .qux()\n", + "}\n", + "\n", + "fn foo2(b: usize) -> usize {\n", + " 42\n", + "}\n", + ), + cx, + ) + }); + let snapshot = buffer.read(cx).snapshot(); + assert_eq!( + AssistantEditKind::resolve_location(&snapshot, "Foo.bar.baz.qux()") + .to_point(&snapshot), + Point::new(1, 0)..Point::new(4, 14) + ); + } + } + + #[gpui::test] + fn test_resolve_edits(cx: &mut AppContext) { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + language::init(cx); + cx.update_global::(|settings, cx| { + settings.update_user_settings::(cx, |_| {}); + }); + + assert_edits( + " + /// A person + struct Person { + name: String, + age: usize, + } + + /// A dog + struct Dog { + weight: f32, + } + + impl Person { + fn name(&self) -> &str { + &self.name + } + } + " + .unindent(), + vec![ + AssistantEditKind::Update { + old_text: " + name: String, + " + .unindent(), + new_text: " + first_name: String, + last_name: String, + " + .unindent(), + description: "".into(), + }, + AssistantEditKind::Update { + old_text: " + fn name(&self) -> &str { + &self.name + } + " + .unindent(), + new_text: " + fn name(&self) -> String { + format!(\"{} {}\", self.first_name, self.last_name) + } + " + .unindent(), + description: "".into(), + }, + ], + " + /// A person + struct Person { + first_name: String, + last_name: String, + age: usize, + } + + /// A dog + struct Dog { + weight: f32, + } + + impl Person { + fn name(&self) -> String { + format!(\"{} {}\", self.first_name, self.last_name) + } + } + " + .unindent(), + cx, + ); + } + + #[track_caller] + fn assert_edits( + old_text: String, + edits: Vec, + new_text: String, + cx: &mut AppContext, + ) { + let buffer = + cx.new_model(|cx| Buffer::local(old_text, cx).with_language(Arc::new(rust_lang()), cx)); + let snapshot = buffer.read(cx).snapshot(); + let resolved_edits = edits + .into_iter() + .map(|kind| kind.resolve(&snapshot)) + .collect(); + let edit_groups = AssistantPatch::group_edits(resolved_edits, &snapshot); + ResolvedPatch::apply_edit_groups(&edit_groups, 
&buffer, cx); + let actual_new_text = buffer.read(cx).text(); + pretty_assertions::assert_eq!(actual_new_text, new_text); + } + + fn rust_lang() -> Language { + Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(language::tree_sitter_rust::LANGUAGE.into()), + ) + .with_indents_query( + r#" + (call_expression) @indent + (field_expression) @indent + (_ "(" ")" @end) @indent + (_ "{" "}" @end) @indent + "#, + ) + .unwrap() + } +} diff --git a/crates/assistant/src/prompt_library.rs b/crates/assistant/src/prompt_library.rs index c99a7c15214d24..298bb322265c5f 100644 --- a/crates/assistant/src/prompt_library.rs +++ b/crates/assistant/src/prompt_library.rs @@ -521,9 +521,9 @@ impl PromptLibrary { editor.set_show_indent_guides(false, cx); editor.set_use_modal_editing(false); editor.set_current_line_highlight(Some(CurrentLineHighlight::None)); - editor.set_completion_provider(Box::new( + editor.set_completion_provider(Some(Box::new( SlashCommandCompletionProvider::new(None, None), - )); + ))); if focus { editor.focus(cx); } @@ -796,7 +796,7 @@ impl PromptLibrary { }], tools: Vec::new(), stop: Vec::new(), - temperature: 1., + temperature: None, }, cx, ) @@ -910,7 +910,7 @@ impl PromptLibrary { .features .clone(), font_size: HeadlineSize::Large - .size() + .rems() .into(), font_weight: settings.ui_font.weight, line_height: relative( @@ -921,10 +921,8 @@ impl PromptLibrary { scrollbar_width: Pixels::ZERO, syntax: cx.theme().syntax().clone(), status: cx.theme().status().clone(), - inlay_hints_style: HighlightStyle { - color: Some(cx.theme().status().hint), - ..HighlightStyle::default() - }, + inlay_hints_style: + editor::make_inlay_hints_style(cx), suggestions_style: HighlightStyle { color: Some(cx.theme().status().predictive), ..HighlightStyle::default() diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index 068bf7158de08c..132b3df68f4bc6 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -4,13 +4,20 @@ use fs::Fs; use futures::StreamExt; use gpui::AssetSource; use handlebars::{Handlebars, RenderError}; -use language::BufferSnapshot; +use language::{BufferSnapshot, LanguageName, Point}; use parking_lot::Mutex; use serde::Serialize; use std::{ops::Range, path::PathBuf, sync::Arc, time::Duration}; use text::LineEnding; use util::ResultExt; +#[derive(Serialize)] +pub struct ContentPromptDiagnosticContext { + pub line_number: usize, + pub error_message: String, + pub code_content: String, +} + #[derive(Serialize)] pub struct ContentPromptContext { pub content_type: String, @@ -20,6 +27,7 @@ pub struct ContentPromptContext { pub document_content: String, pub user_prompt: String, pub rewrite_section: Option, + pub diagnostic_errors: Vec, } #[derive(Serialize)] @@ -32,13 +40,9 @@ pub struct TerminalAssistantPromptContext { pub user_prompt: String, } -/// Context required to generate a workflow step resolution prompt. -#[derive(Debug, Serialize)] -pub struct StepResolutionContext { - /// The full context, including ... tags - pub workflow_context: String, - /// The text of the specific step from the context to resolve - pub step_to_resolve: String, +#[derive(Serialize)] +pub struct ProjectSlashCommandPromptContext { + pub context_buffer: String, } pub struct PromptLoadingParams<'a> { @@ -82,10 +86,9 @@ impl PromptBuilder { /// and application context. 
/// * `handlebars` - An `Arc>` for registering and updating templates. fn watch_fs_for_template_overrides( - mut params: PromptLoadingParams, + params: PromptLoadingParams, handlebars: Arc>>, ) { - params.repo_path = None; let templates_dir = paths::prompt_overrides_dir(params.repo_path.as_deref()); params.cx.background_executor() .spawn(async move { @@ -204,11 +207,11 @@ impl PromptBuilder { pub fn generate_content_prompt( &self, user_prompt: String, - language_name: Option<&str>, + language_name: Option<&LanguageName>, buffer: BufferSnapshot, range: Range, ) -> Result { - let content_type = match language_name { + let content_type = match language_name.as_ref().map(|l| l.0.as_ref()) { None | Some("Markdown" | "Plain Text") => "text", Some(_) => "code", }; @@ -220,7 +223,8 @@ impl PromptBuilder { let before_range = 0..range.start; let truncated_before = if before_range.len() > MAX_CTX { is_truncated = true; - range.start - MAX_CTX..range.start + let start = buffer.clip_offset(range.start - MAX_CTX, text::Bias::Right); + start..range.start } else { before_range }; @@ -228,7 +232,8 @@ impl PromptBuilder { let after_range = range.end..buffer.len(); let truncated_after = if after_range.len() > MAX_CTX { is_truncated = true; - range.end..range.end + MAX_CTX + let end = buffer.clip_offset(range.end + MAX_CTX, text::Bias::Left); + range.end..end } else { after_range }; @@ -259,6 +264,17 @@ impl PromptBuilder { } else { None }; + let diagnostics = buffer.diagnostics_in_range::<_, Point>(range, false); + let diagnostic_errors: Vec = diagnostics + .map(|entry| { + let start = entry.range.start; + ContentPromptDiagnosticContext { + line_number: (start.row + 1) as usize, + error_message: entry.diagnostic.message.clone(), + code_content: buffer.text_for_range(entry.range.clone()).collect(), + } + }) + .collect(); let context = ContentPromptContext { content_type: content_type.to_string(), @@ -268,8 +284,8 @@ impl PromptBuilder { document_content, user_prompt, rewrite_section, + diagnostic_errors, }; - self.handlebars.lock().render("content_prompt", &context) } @@ -297,4 +313,14 @@ impl PromptBuilder { pub fn generate_workflow_prompt(&self) -> Result { self.handlebars.lock().render("edit_workflow", &()) } + + pub fn generate_project_slash_command_prompt( + &self, + context_buffer: String, + ) -> Result { + self.handlebars.lock().render( + "project_slash_command", + &ProjectSlashCommandPromptContext { context_buffer }, + ) + } } diff --git a/crates/assistant/src/slash_command.rs b/crates/assistant/src/slash_command.rs index b1a97688b2b46a..e430e35622a222 100644 --- a/crates/assistant/src/slash_command.rs +++ b/crates/assistant/src/slash_command.rs @@ -18,9 +18,11 @@ use std::{ }; use ui::ActiveTheme; use workspace::Workspace; - +pub mod auto_command; +pub mod cargo_workspace_command; pub mod context_server_command; pub mod default_command; +pub mod delta_command; pub mod diagnostics_command; pub mod docs_command; pub mod fetch_command; diff --git a/crates/assistant/src/slash_command/auto_command.rs b/crates/assistant/src/slash_command/auto_command.rs new file mode 100644 index 00000000000000..14bbb7c8412b41 --- /dev/null +++ b/crates/assistant/src/slash_command/auto_command.rs @@ -0,0 +1,362 @@ +use super::create_label_for_command; +use super::{SlashCommand, SlashCommandOutput}; +use anyhow::{anyhow, Result}; +use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; +use feature_flags::FeatureFlag; +use futures::StreamExt; +use gpui::{AppContext, AsyncAppContext, Task, WeakView}; 
+use language::{CodeLabel, LspAdapterDelegate}; +use language_model::{ + LanguageModelCompletionEvent, LanguageModelRegistry, LanguageModelRequest, + LanguageModelRequestMessage, Role, +}; +use semantic_index::{FileSummary, SemanticDb}; +use smol::channel; +use std::sync::{atomic::AtomicBool, Arc}; +use ui::{BorrowAppContext, WindowContext}; +use util::ResultExt; +use workspace::Workspace; + +pub struct AutoSlashCommandFeatureFlag; + +impl FeatureFlag for AutoSlashCommandFeatureFlag { + const NAME: &'static str = "auto-slash-command"; +} + +pub(crate) struct AutoCommand; + +impl SlashCommand for AutoCommand { + fn name(&self) -> String { + "auto".into() + } + + fn description(&self) -> String { + "Automatically infer what context to add".into() + } + + fn menu_text(&self) -> String { + self.description() + } + + fn label(&self, cx: &AppContext) -> CodeLabel { + create_label_for_command("auto", &["--prompt"], cx) + } + + fn complete_argument( + self: Arc, + _arguments: &[String], + _cancel: Arc, + workspace: Option>, + cx: &mut WindowContext, + ) -> Task>> { + // There's no autocomplete for a prompt, since it's arbitrary text. + // However, we can use this opportunity to kick off a drain of the backlog. + // That way, it can hopefully be done resummarizing by the time we've actually + // typed out our prompt. This re-runs on every keystroke during autocomplete, + // but in the future, we could instead do it only once, when /auto is first entered. + let Some(workspace) = workspace.and_then(|ws| ws.upgrade()) else { + log::warn!("workspace was dropped or unavailable during /auto autocomplete"); + + return Task::ready(Ok(Vec::new())); + }; + + let project = workspace.read(cx).project().clone(); + let Some(project_index) = + cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx)) + else { + return Task::ready(Err(anyhow!("No project indexer, cannot use /auto"))); + }; + + let cx: &mut AppContext = cx; + + cx.spawn(|cx: gpui::AsyncAppContext| async move { + let task = project_index.read_with(&cx, |project_index, cx| { + project_index.flush_summary_backlogs(cx) + })?; + + cx.background_executor().spawn(task).await; + + anyhow::Ok(Vec::new()) + }) + } + + fn requires_argument(&self) -> bool { + true + } + + fn run( + self: Arc, + arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: language::BufferSnapshot, + workspace: WeakView, + _delegate: Option>, + cx: &mut WindowContext, + ) -> Task> { + let Some(workspace) = workspace.upgrade() else { + return Task::ready(Err(anyhow::anyhow!("workspace was dropped"))); + }; + if arguments.is_empty() { + return Task::ready(Err(anyhow!("missing prompt"))); + }; + let argument = arguments.join(" "); + let original_prompt = argument.to_string(); + let project = workspace.read(cx).project().clone(); + let Some(project_index) = + cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx)) + else { + return Task::ready(Err(anyhow!("no project indexer"))); + }; + + let task = cx.spawn(|cx: gpui::AsyncWindowContext| async move { + let summaries = project_index + .read_with(&cx, |project_index, cx| project_index.all_summaries(cx))? + .await?; + + commands_for_summaries(&summaries, &original_prompt, &cx).await + }); + + // As a convenience, append /auto's argument to the end of the prompt + // so you don't have to write it again. 
+ let original_prompt = argument.to_string(); + + cx.background_executor().spawn(async move { + let commands = task.await?; + let mut prompt = String::new(); + + log::info!( + "Translating this response into slash-commands: {:?}", + commands + ); + + for command in commands { + prompt.push('/'); + prompt.push_str(&command.name); + prompt.push(' '); + prompt.push_str(&command.arg); + prompt.push('\n'); + } + + prompt.push('\n'); + prompt.push_str(&original_prompt); + + Ok(SlashCommandOutput { + text: prompt, + sections: Vec::new(), + run_commands_in_text: true, + }) + }) + } +} + +const PROMPT_INSTRUCTIONS_BEFORE_SUMMARY: &str = include_str!("prompt_before_summary.txt"); +const PROMPT_INSTRUCTIONS_AFTER_SUMMARY: &str = include_str!("prompt_after_summary.txt"); + +fn summaries_prompt(summaries: &[FileSummary], original_prompt: &str) -> String { + let json_summaries = serde_json::to_string(summaries).unwrap(); + + format!("{PROMPT_INSTRUCTIONS_BEFORE_SUMMARY}\n{json_summaries}\n{PROMPT_INSTRUCTIONS_AFTER_SUMMARY}\n{original_prompt}") +} + +/// The slash commands that the model is told about, and which we look for in the inference response. +const SUPPORTED_SLASH_COMMANDS: &[&str] = &["search", "file"]; + +#[derive(Debug, Clone)] +struct CommandToRun { + name: String, + arg: String, +} + +/// Given the pre-indexed file summaries for this project, as well as the original prompt +/// string passed to `/auto`, get a list of slash commands to run, along with their arguments. +/// +/// The prompt's output does not include the slashes (to reduce the chance that it makes a mistake), +/// so taking one of these returned Strings and turning it into a real slash-command-with-argument +/// involves prepending a slash to it. +/// +/// This function will validate that each of the returned lines begins with one of SUPPORTED_SLASH_COMMANDS. +/// Any other lines it encounters will be discarded, with a warning logged. +async fn commands_for_summaries( + summaries: &[FileSummary], + original_prompt: &str, + cx: &AsyncAppContext, +) -> Result> { + if summaries.is_empty() { + log::warn!("Inferring no context because there were no summaries available."); + return Ok(Vec::new()); + } + + // Use the globally configured model to translate the summaries into slash-commands, + // because Qwen2-7B-Instruct has not done a good job at that task. + let Some(model) = cx.update(|cx| LanguageModelRegistry::read_global(cx).active_model())? else { + log::warn!("Can't infer context because there's no active model."); + return Ok(Vec::new()); + }; + // Only go up to 90% of the actual max token count, to reduce chances of + // exceeding the token count due to inaccuracies in the token counting heuristic. + let max_token_count = (model.max_token_count() * 9) / 10; + + // Rather than recursing (which would require this async function use a pinned box), + // we use an explicit stack of arguments and answers for when we need to "recurse." + let mut stack = vec![summaries]; + let mut final_response = Vec::new(); + let mut prompts = Vec::new(); + + // TODO We only need to create multiple Requests because we currently + // don't have the ability to tell if a CompletionProvider::complete response + // was a "too many tokens in this request" error. If we had that, then + // we could try the request once, instead of having to make separate requests + // to check the token count and then afterwards to run the actual prompt. 
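// (Illustrative sketch, not part of this change.) The budget pre-check performed in the
// loop below boils down to the following hypothetical helper, assuming the ~4.5
// characters-per-token heuristic described in the comments further down: estimate the
// token count from the prompt's byte length and only send the request when it fits.
fn fits_token_budget(prompt: &str, max_token_count: usize) -> bool {
    // prompt.len() * 2 / 9 is the same arithmetic as prompt.len() / 4.5, kept in integers.
    let token_estimate = prompt.len() * 2 / 9;
    token_estimate < max_token_count
}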
+    let make_request = |prompt: String| LanguageModelRequest {
+        messages: vec![LanguageModelRequestMessage {
+            role: Role::User,
+            content: vec![prompt.into()],
+            // Nothing in here will benefit from caching
+            cache: false,
+        }],
+        tools: Vec::new(),
+        stop: Vec::new(),
+        temperature: None,
+    };
+
+    while let Some(current_summaries) = stack.pop() {
+        // The split can result in one slice being empty and the other having one element.
+        // Whenever that happens, skip the empty one.
+        if current_summaries.is_empty() {
+            continue;
+        }
+
+        log::info!(
+            "Inferring prompt context using {} file summaries",
+            current_summaries.len()
+        );
+
+        let prompt = summaries_prompt(&current_summaries, original_prompt);
+        let start = std::time::Instant::now();
+        // Per OpenAI, 1 token ~= 4 chars in English (we go with 4.5 to overestimate a bit, because failed API requests cost a lot of perf)
+        // Verifying this against an actual model.count_tokens() confirms that it's usually within ~5% of the correct answer, whereas
+        // getting the correct answer from tiktoken takes hundreds of milliseconds (compared to this arithmetic being ~free).
+        // source: https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them
+        let token_estimate = prompt.len() * 2 / 9;
+        let duration = start.elapsed();
+        log::info!(
+            "Time taken to count tokens for prompt of length {:?}B: {:?}",
+            prompt.len(),
+            duration
+        );
+
+        if token_estimate < max_token_count {
+            prompts.push(prompt);
+        } else if current_summaries.len() == 1 {
+            log::warn!("Inferring context for a single file's summary failed because the prompt's token length exceeded the model's token limit.");
+        } else {
+            log::info!(
+                "Context inference using file summaries resulted in a prompt containing {token_estimate} tokens, which exceeded the model's max of {max_token_count}. Retrying as two separate prompts, each including half the number of summaries.",
+            );
+            let (left, right) = current_summaries.split_at(current_summaries.len() / 2);
+            stack.push(right);
+            stack.push(left);
+        }
+    }
+
+    let all_start = std::time::Instant::now();
+
+    let (tx, rx) = channel::bounded(1024);
+
+    let completion_streams = prompts
+        .into_iter()
+        .map(|prompt| {
+            let request = make_request(prompt.clone());
+            let model = model.clone();
+            let tx = tx.clone();
+            let stream = model.stream_completion(request, &cx);
+
+            (stream, tx)
+        })
+        .collect::<Vec<_>>();
+
+    cx.background_executor()
+        .spawn(async move {
+            let futures = completion_streams
+                .into_iter()
+                .enumerate()
+                .map(|(ix, (stream, tx))| async move {
+                    let start = std::time::Instant::now();
+                    let events = stream.await?;
+                    log::info!("Time taken for awaiting /await chunk stream #{ix}: {:?}", start.elapsed());
+
+                    let completion: String = events
+                        .filter_map(|event| async {
+                            if let Ok(LanguageModelCompletionEvent::Text(text)) = event {
+                                Some(text)
+                            } else {
+                                None
+                            }
+                        })
+                        .collect()
+                        .await;
+
+                    log::info!("Time taken for all /auto chunks to come back for #{ix}: {:?}", start.elapsed());
+
+                    for line in completion.split('\n') {
+                        if let Some(first_space) = line.find(' ') {
+                            let command = &line[..first_space].trim();
+                            let arg = &line[first_space..].trim();
+
+                            tx.send(CommandToRun {
+                                name: command.to_string(),
+                                arg: arg.to_string(),
+                            })
+                            .await?;
+                        } else if !line.trim().is_empty() {
+                            // All slash-commands currently supported in context inference need a space for the argument.
+ log::warn!( + "Context inference returned a non-blank line that contained no spaces (meaning no argument for the slash command): {:?}", + line + ); + } + } + + anyhow::Ok(()) + }) + .collect::>(); + + let _ = futures::future::try_join_all(futures).await.log_err(); + + let duration = all_start.elapsed(); + eprintln!("All futures completed in {:?}", duration); + }) + .await; + + drop(tx); // Close the channel so that rx.collect() won't hang. This is safe because all futures have completed. + let results = rx.collect::>().await; + eprintln!( + "Finished collecting from the channel with {} results", + results.len() + ); + for command in results { + // Don't return empty or duplicate commands + if !command.name.is_empty() + && !final_response + .iter() + .any(|cmd: &CommandToRun| cmd.name == command.name && cmd.arg == command.arg) + { + if SUPPORTED_SLASH_COMMANDS + .iter() + .any(|supported| &command.name == supported) + { + final_response.push(command); + } else { + log::warn!( + "Context inference returned an unrecognized slash command: {:?}", + command + ); + } + } + } + + // Sort the commands by name (reversed just so that /search appears before /file) + final_response.sort_by(|cmd1, cmd2| cmd1.name.cmp(&cmd2.name).reverse()); + + Ok(final_response) +} diff --git a/crates/assistant/src/slash_command/cargo_workspace_command.rs b/crates/assistant/src/slash_command/cargo_workspace_command.rs new file mode 100644 index 00000000000000..baf16d7f014cb2 --- /dev/null +++ b/crates/assistant/src/slash_command/cargo_workspace_command.rs @@ -0,0 +1,153 @@ +use super::{SlashCommand, SlashCommandOutput}; +use anyhow::{anyhow, Context, Result}; +use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; +use fs::Fs; +use gpui::{AppContext, Model, Task, WeakView}; +use language::{BufferSnapshot, LspAdapterDelegate}; +use project::{Project, ProjectPath}; +use std::{ + fmt::Write, + path::Path, + sync::{atomic::AtomicBool, Arc}, +}; +use ui::prelude::*; +use workspace::Workspace; + +pub(crate) struct CargoWorkspaceSlashCommand; + +impl CargoWorkspaceSlashCommand { + async fn build_message(fs: Arc, path_to_cargo_toml: &Path) -> Result { + let buffer = fs.load(path_to_cargo_toml).await?; + let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?; + + let mut message = String::new(); + writeln!(message, "You are in a Rust project.")?; + + if let Some(workspace) = cargo_toml.workspace { + writeln!( + message, + "The project is a Cargo workspace with the following members:" + )?; + for member in workspace.members { + writeln!(message, "- {member}")?; + } + + if !workspace.default_members.is_empty() { + writeln!(message, "The default members are:")?; + for member in workspace.default_members { + writeln!(message, "- {member}")?; + } + } + + if !workspace.dependencies.is_empty() { + writeln!( + message, + "The following workspace dependencies are installed:" + )?; + for dependency in workspace.dependencies.keys() { + writeln!(message, "- {dependency}")?; + } + } + } else if let Some(package) = cargo_toml.package { + writeln!( + message, + "The project name is \"{name}\".", + name = package.name + )?; + + let description = package + .description + .as_ref() + .and_then(|description| description.get().ok().cloned()); + if let Some(description) = description.as_ref() { + writeln!(message, "It describes itself as \"{description}\".")?; + } + + if !cargo_toml.dependencies.is_empty() { + writeln!(message, "The following dependencies are installed:")?; + for dependency in 
cargo_toml.dependencies.keys() { + writeln!(message, "- {dependency}")?; + } + } + } + + Ok(message) + } + + fn path_to_cargo_toml(project: Model, cx: &mut AppContext) -> Option> { + let worktree = project.read(cx).worktrees(cx).next()?; + let worktree = worktree.read(cx); + let entry = worktree.entry_for_path("Cargo.toml")?; + let path = ProjectPath { + worktree_id: worktree.id(), + path: entry.path.clone(), + }; + Some(Arc::from( + project.read(cx).absolute_path(&path, cx)?.as_path(), + )) + } +} + +impl SlashCommand for CargoWorkspaceSlashCommand { + fn name(&self) -> String { + "cargo-workspace".into() + } + + fn description(&self) -> String { + "insert project workspace metadata".into() + } + + fn menu_text(&self) -> String { + "Insert Project Workspace Metadata".into() + } + + fn complete_argument( + self: Arc, + _arguments: &[String], + _cancel: Arc, + _workspace: Option>, + _cx: &mut WindowContext, + ) -> Task>> { + Task::ready(Err(anyhow!("this command does not require argument"))) + } + + fn requires_argument(&self) -> bool { + false + } + + fn run( + self: Arc, + _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, + workspace: WeakView, + _delegate: Option>, + cx: &mut WindowContext, + ) -> Task> { + let output = workspace.update(cx, |workspace, cx| { + let project = workspace.project().clone(); + let fs = workspace.project().read(cx).fs().clone(); + let path = Self::path_to_cargo_toml(project, cx); + let output = cx.background_executor().spawn(async move { + let path = path.with_context(|| "Cargo.toml not found")?; + Self::build_message(fs, &path).await + }); + + cx.foreground_executor().spawn(async move { + let text = output.await?; + let range = 0..text.len(); + Ok(SlashCommandOutput { + text, + sections: vec![SlashCommandOutputSection { + range, + icon: IconName::FileTree, + label: "Project".into(), + metadata: None, + }], + run_commands_in_text: false, + }) + }) + }); + output.unwrap_or_else(|error| Task::ready(Err(error))) + } +} diff --git a/crates/assistant/src/slash_command/context_server_command.rs b/crates/assistant/src/slash_command/context_server_command.rs index 8ae9430a993e16..3db057d07494cd 100644 --- a/crates/assistant/src/slash_command/context_server_command.rs +++ b/crates/assistant/src/slash_command/context_server_command.rs @@ -1,3 +1,4 @@ +use super::create_label_for_command; use anyhow::{anyhow, Result}; use assistant_slash_command::{ AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandOutput, @@ -6,10 +7,10 @@ use assistant_slash_command::{ use collections::HashMap; use context_servers::{ manager::{ContextServer, ContextServerManager}, - protocol::PromptInfo, + types::Prompt, }; -use gpui::{Task, WeakView, WindowContext}; -use language::{CodeLabel, LspAdapterDelegate}; +use gpui::{AppContext, Task, WeakView, WindowContext}; +use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate}; use std::sync::atomic::AtomicBool; use std::sync::Arc; use text::LineEnding; @@ -18,11 +19,11 @@ use workspace::Workspace; pub struct ContextServerSlashCommand { server_id: String, - prompt: PromptInfo, + prompt: Prompt, } impl ContextServerSlashCommand { - pub fn new(server: &Arc, prompt: PromptInfo) -> Self { + pub fn new(server: &Arc, prompt: Prompt) -> Self { Self { server_id: server.id.clone(), prompt, @@ -35,12 +36,28 @@ impl SlashCommand for ContextServerSlashCommand { self.prompt.name.clone() } + fn label(&self, cx: &AppContext) -> language::CodeLabel { + let mut parts = 
vec![self.prompt.name.as_str()]; + if let Some(args) = &self.prompt.arguments { + if let Some(arg) = args.first() { + parts.push(arg.name.as_str()); + } + } + create_label_for_command(&parts[0], &parts[1..], cx) + } + fn description(&self) -> String { - format!("Run context server command: {}", self.prompt.name) + match &self.prompt.description { + Some(desc) => desc.clone(), + None => format!("Run '{}' from {}", self.prompt.name, self.server_id), + } } fn menu_text(&self) -> String { - format!("Run '{}' from {}", self.prompt.name, self.server_id) + match &self.prompt.description { + Some(desc) => desc.clone(), + None => format!("Run '{}' from {}", self.prompt.name, self.server_id), + } } fn requires_argument(&self) -> bool { @@ -96,7 +113,6 @@ impl SlashCommand for ContextServerSlashCommand { replace_previous_arguments: false, }) .collect(); - Ok(completions) }) } else { @@ -107,6 +123,8 @@ impl SlashCommand for ContextServerSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -141,6 +159,7 @@ impl SlashCommand for ContextServerSlashCommand { .description .unwrap_or(format!("Result from {}", prompt_name)), ), + metadata: None, }], text: prompt, run_commands_in_text: false, @@ -152,7 +171,7 @@ impl SlashCommand for ContextServerSlashCommand { } } -fn completion_argument(prompt: &PromptInfo, arguments: &[String]) -> Result<(String, String)> { +fn completion_argument(prompt: &Prompt, arguments: &[String]) -> Result<(String, String)> { if arguments.is_empty() { return Err(anyhow!("No arguments given")); } @@ -168,7 +187,7 @@ fn completion_argument(prompt: &PromptInfo, arguments: &[String]) -> Result<(Str } } -fn prompt_arguments(prompt: &PromptInfo, arguments: &[String]) -> Result> { +fn prompt_arguments(prompt: &Prompt, arguments: &[String]) -> Result> { match &prompt.arguments { Some(args) if args.len() > 1 => Err(anyhow!( "Prompt has more than one argument, which is not supported" @@ -197,7 +216,7 @@ fn prompt_arguments(prompt: &PromptInfo, arguments: &[String]) -> Result bool { +pub fn acceptable_prompt(prompt: &Prompt) -> bool { match &prompt.arguments { None => true, Some(args) if args.len() <= 1 => true, diff --git a/crates/assistant/src/slash_command/default_command.rs b/crates/assistant/src/slash_command/default_command.rs index 18db87b3228b9c..4199840300a24c 100644 --- a/crates/assistant/src/slash_command/default_command.rs +++ b/crates/assistant/src/slash_command/default_command.rs @@ -3,7 +3,7 @@ use crate::prompt_library::PromptStore; use anyhow::{anyhow, Result}; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use gpui::{Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use std::{ fmt::Write, sync::{atomic::AtomicBool, Arc}, @@ -43,6 +43,8 @@ impl SlashCommand for DefaultSlashCommand { fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -70,6 +72,7 @@ impl SlashCommand for DefaultSlashCommand { range: 0..text.len(), icon: IconName::Library, label: "Default".into(), + metadata: None, }], text, run_commands_in_text: true, diff --git a/crates/assistant/src/slash_command/delta_command.rs b/crates/assistant/src/slash_command/delta_command.rs new file mode 100644 
index 00000000000000..6f697ecbb9bcba --- /dev/null +++ b/crates/assistant/src/slash_command/delta_command.rs @@ -0,0 +1,109 @@ +use crate::slash_command::file_command::{FileCommandMetadata, FileSlashCommand}; +use anyhow::Result; +use assistant_slash_command::{ + ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, +}; +use collections::HashSet; +use futures::future; +use gpui::{Task, WeakView, WindowContext}; +use language::{BufferSnapshot, LspAdapterDelegate}; +use std::sync::{atomic::AtomicBool, Arc}; +use text::OffsetRangeExt; +use workspace::Workspace; + +pub(crate) struct DeltaSlashCommand; + +impl SlashCommand for DeltaSlashCommand { + fn name(&self) -> String { + "delta".into() + } + + fn description(&self) -> String { + "Re-insert changed files".into() + } + + fn menu_text(&self) -> String { + self.description() + } + + fn requires_argument(&self) -> bool { + false + } + + fn complete_argument( + self: Arc, + _arguments: &[String], + _cancellation_flag: Arc, + _workspace: Option>, + _cx: &mut WindowContext, + ) -> Task>> { + unimplemented!() + } + + fn run( + self: Arc, + _arguments: &[String], + context_slash_command_output_sections: &[SlashCommandOutputSection], + context_buffer: BufferSnapshot, + workspace: WeakView, + delegate: Option>, + cx: &mut WindowContext, + ) -> Task> { + let mut paths = HashSet::default(); + let mut file_command_old_outputs = Vec::new(); + let mut file_command_new_outputs = Vec::new(); + for section in context_slash_command_output_sections.iter().rev() { + if let Some(metadata) = section + .metadata + .as_ref() + .and_then(|value| serde_json::from_value::(value.clone()).ok()) + { + if paths.insert(metadata.path.clone()) { + file_command_old_outputs.push( + context_buffer + .as_rope() + .slice(section.range.to_offset(&context_buffer)), + ); + file_command_new_outputs.push(Arc::new(FileSlashCommand).run( + &[metadata.path.clone()], + context_slash_command_output_sections, + context_buffer.clone(), + workspace.clone(), + delegate.clone(), + cx, + )); + } + } + } + + cx.background_executor().spawn(async move { + let mut output = SlashCommandOutput::default(); + + let file_command_new_outputs = future::join_all(file_command_new_outputs).await; + for (old_text, new_output) in file_command_old_outputs + .into_iter() + .zip(file_command_new_outputs) + { + if let Ok(new_output) = new_output { + if let Some(file_command_range) = new_output.sections.first() { + let new_text = &new_output.text[file_command_range.range.clone()]; + if old_text.chars().ne(new_text.chars()) { + output.sections.extend(new_output.sections.into_iter().map( + |section| SlashCommandOutputSection { + range: output.text.len() + section.range.start + ..output.text.len() + section.range.end, + icon: section.icon, + label: section.label, + metadata: section.metadata, + }, + )); + output.text.push_str(&new_output.text); + } + } + } + } + + Ok(output) + }) + } +} diff --git a/crates/assistant/src/slash_command/diagnostics_command.rs b/crates/assistant/src/slash_command/diagnostics_command.rs index 6c821bd7b4a5c9..146a4e5d366dd3 100644 --- a/crates/assistant/src/slash_command/diagnostics_command.rs +++ b/crates/assistant/src/slash_command/diagnostics_command.rs @@ -9,10 +9,9 @@ use language::{ }; use project::{DiagnosticSummary, PathMatchCandidateSet, Project}; use rope::Point; -use std::fmt::Write; -use std::path::{Path, PathBuf}; use std::{ - ops::Range, + fmt::Write, + path::{Path, PathBuf}, sync::{atomic::AtomicBool, Arc}, }; use ui::prelude::*; @@ -96,7 +95,7 @@ 
impl SlashCommand for DiagnosticsSlashCommand { } fn menu_text(&self) -> String { - "Insert Diagnostics".into() + self.description() } fn requires_argument(&self) -> bool { @@ -163,6 +162,8 @@ impl SlashCommand for DiagnosticsSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -175,68 +176,7 @@ impl SlashCommand for DiagnosticsSlashCommand { let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx); - cx.spawn(move |_| async move { - let Some((text, sections)) = task.await? else { - return Ok(SlashCommandOutput { - sections: vec![SlashCommandOutputSection { - range: 0..1, - icon: IconName::Library, - label: "No Diagnostics".into(), - }], - text: "\n".to_string(), - run_commands_in_text: true, - }); - }; - - let sections = sections - .into_iter() - .map(|(range, placeholder_type)| SlashCommandOutputSection { - range, - icon: match placeholder_type { - PlaceholderType::Root(_, _) => IconName::ExclamationTriangle, - PlaceholderType::File(_) => IconName::File, - PlaceholderType::Diagnostic(DiagnosticType::Error, _) => IconName::XCircle, - PlaceholderType::Diagnostic(DiagnosticType::Warning, _) => { - IconName::ExclamationTriangle - } - }, - label: match placeholder_type { - PlaceholderType::Root(summary, source) => { - let mut label = String::new(); - label.push_str("Diagnostics"); - if let Some(source) = source { - write!(label, " ({})", source).unwrap(); - } - - if summary.error_count > 0 || summary.warning_count > 0 { - label.push(':'); - - if summary.error_count > 0 { - write!(label, " {} errors", summary.error_count).unwrap(); - if summary.warning_count > 0 { - label.push_str(","); - } - } - - if summary.warning_count > 0 { - write!(label, " {} warnings", summary.warning_count).unwrap(); - } - } - - label.into() - } - PlaceholderType::File(file_path) => file_path.into(), - PlaceholderType::Diagnostic(_, message) => message.into(), - }, - }) - .collect(); - - Ok(SlashCommandOutput { - text, - sections, - run_commands_in_text: false, - }) - }) + cx.spawn(move |_| async move { task.await?.ok_or_else(|| anyhow!("No diagnostics found")) }) } } @@ -277,7 +217,7 @@ fn collect_diagnostics( project: Model, options: Options, cx: &mut AppContext, -) -> Task, PlaceholderType)>)>>> { +) -> Task>> { let error_source = if let Some(path_matcher) = &options.path_matcher { debug_assert_eq!(path_matcher.sources().len(), 1); Some(path_matcher.sources().first().cloned().unwrap_or_default()) @@ -318,13 +258,13 @@ fn collect_diagnostics( .collect(); cx.spawn(|mut cx| async move { - let mut text = String::new(); + let mut output = SlashCommandOutput::default(); + if let Some(error_source) = error_source.as_ref() { - writeln!(text, "diagnostics: {}", error_source).unwrap(); + writeln!(output.text, "diagnostics: {}", error_source).unwrap(); } else { - writeln!(text, "diagnostics").unwrap(); + writeln!(output.text, "diagnostics").unwrap(); } - let mut sections: Vec<(Range, PlaceholderType)> = Vec::new(); let mut project_summary = DiagnosticSummary::default(); for (project_path, path, summary) in diagnostic_summaries { @@ -341,10 +281,10 @@ fn collect_diagnostics( continue; } - let last_end = text.len(); + let last_end = output.text.len(); let file_path = path.to_string_lossy().to_string(); if !glob_is_exact_file_match { - writeln!(&mut text, "{file_path}").unwrap(); + writeln!(&mut output.text, 
"{file_path}").unwrap(); } if let Some(buffer) = project_handle @@ -352,75 +292,73 @@ fn collect_diagnostics( .await .log_err() { - collect_buffer_diagnostics( - &mut text, - &mut sections, - cx.read_model(&buffer, |buffer, _| buffer.snapshot())?, - options.include_warnings, - ); + let snapshot = cx.read_model(&buffer, |buffer, _| buffer.snapshot())?; + collect_buffer_diagnostics(&mut output, &snapshot, options.include_warnings); } if !glob_is_exact_file_match { - sections.push(( - last_end..text.len().saturating_sub(1), - PlaceholderType::File(file_path), - )) + output.sections.push(SlashCommandOutputSection { + range: last_end..output.text.len().saturating_sub(1), + icon: IconName::File, + label: file_path.into(), + metadata: None, + }); } } // No diagnostics found - if sections.is_empty() { + if output.sections.is_empty() { return Ok(None); } - sections.push(( - 0..text.len(), - PlaceholderType::Root(project_summary, error_source), - )); - Ok(Some((text, sections))) - }) -} - -pub fn buffer_has_error_diagnostics(snapshot: &BufferSnapshot) -> bool { - for (_, group) in snapshot.diagnostic_groups(None) { - let entry = &group.entries[group.primary_ix]; - if entry.diagnostic.severity == DiagnosticSeverity::ERROR { - return true; + let mut label = String::new(); + label.push_str("Diagnostics"); + if let Some(source) = error_source { + write!(label, " ({})", source).unwrap(); } - } - false -} -pub fn write_single_file_diagnostics( - output: &mut String, - path: Option<&Path>, - snapshot: &BufferSnapshot, -) -> bool { - if let Some(path) = path { - if buffer_has_error_diagnostics(&snapshot) { - output.push_str("/diagnostics "); - output.push_str(&path.to_string_lossy()); - return true; + if project_summary.error_count > 0 || project_summary.warning_count > 0 { + label.push(':'); + + if project_summary.error_count > 0 { + write!(label, " {} errors", project_summary.error_count).unwrap(); + if project_summary.warning_count > 0 { + label.push_str(","); + } + } + + if project_summary.warning_count > 0 { + write!(label, " {} warnings", project_summary.warning_count).unwrap(); + } } - } - false + + output.sections.insert( + 0, + SlashCommandOutputSection { + range: 0..output.text.len(), + icon: IconName::Warning, + label: label.into(), + metadata: None, + }, + ); + + Ok(Some(output)) + }) } -fn collect_buffer_diagnostics( - text: &mut String, - sections: &mut Vec<(Range, PlaceholderType)>, - snapshot: BufferSnapshot, +pub fn collect_buffer_diagnostics( + output: &mut SlashCommandOutput, + snapshot: &BufferSnapshot, include_warnings: bool, ) { for (_, group) in snapshot.diagnostic_groups(None) { let entry = &group.entries[group.primary_ix]; - collect_diagnostic(text, sections, entry, &snapshot, include_warnings) + collect_diagnostic(output, entry, &snapshot, include_warnings) } } fn collect_diagnostic( - text: &mut String, - sections: &mut Vec<(Range, PlaceholderType)>, + output: &mut SlashCommandOutput, entry: &DiagnosticEntry, snapshot: &BufferSnapshot, include_warnings: bool, @@ -428,17 +366,17 @@ fn collect_diagnostic( const EXCERPT_EXPANSION_SIZE: u32 = 2; const MAX_MESSAGE_LENGTH: usize = 2000; - let ty = match entry.diagnostic.severity { + let (ty, icon) = match entry.diagnostic.severity { DiagnosticSeverity::WARNING => { if !include_warnings { return; } - DiagnosticType::Warning + ("warning", IconName::Warning) } - DiagnosticSeverity::ERROR => DiagnosticType::Error, + DiagnosticSeverity::ERROR => ("error", IconName::XCircle), _ => return, }; - let prev_len = text.len(); + let prev_len = 
output.text.len(); let range = entry.range.to_point(snapshot); let diagnostic_row_number = range.start.row + 1; @@ -448,11 +386,11 @@ fn collect_diagnostic( let excerpt_range = Point::new(start_row, 0).to_offset(&snapshot)..Point::new(end_row, 0).to_offset(&snapshot); - text.push_str("```"); + output.text.push_str("```"); if let Some(language_name) = snapshot.language().map(|l| l.code_fence_block_name()) { - text.push_str(&language_name); + output.text.push_str(&language_name); } - text.push('\n'); + output.text.push('\n'); let mut buffer_text = String::new(); for chunk in snapshot.text_for_range(excerpt_range) { @@ -461,46 +399,26 @@ fn collect_diagnostic( for (i, line) in buffer_text.lines().enumerate() { let line_number = start_row + i as u32 + 1; - writeln!(text, "{}", line).unwrap(); + writeln!(output.text, "{}", line).unwrap(); if line_number == diagnostic_row_number { - text.push_str("//"); - let prev_len = text.len(); - write!(text, " {}: ", ty.as_str()).unwrap(); - let padding = text.len() - prev_len; + output.text.push_str("//"); + let prev_len = output.text.len(); + write!(output.text, " {}: ", ty).unwrap(); + let padding = output.text.len() - prev_len; let message = util::truncate(&entry.diagnostic.message, MAX_MESSAGE_LENGTH) .replace('\n', format!("\n//{:padding$}", "").as_str()); - writeln!(text, "{message}").unwrap(); + writeln!(output.text, "{message}").unwrap(); } } - writeln!(text, "```").unwrap(); - sections.push(( - prev_len..text.len().saturating_sub(1), - PlaceholderType::Diagnostic(ty, entry.diagnostic.message.clone()), - )) -} - -#[derive(Clone)] -pub enum PlaceholderType { - Root(DiagnosticSummary, Option), - File(String), - Diagnostic(DiagnosticType, String), -} - -#[derive(Copy, Clone)] -pub enum DiagnosticType { - Warning, - Error, -} - -impl DiagnosticType { - pub fn as_str(&self) -> &'static str { - match self { - DiagnosticType::Warning => "warning", - DiagnosticType::Error => "error", - } - } + writeln!(output.text, "```").unwrap(); + output.sections.push(SlashCommandOutputSection { + range: prev_len..output.text.len().saturating_sub(1), + icon, + label: entry.diagnostic.message.clone().into(), + metadata: None, + }); } diff --git a/crates/assistant/src/slash_command/docs_command.rs b/crates/assistant/src/slash_command/docs_command.rs index e114cfeab74b31..399ede9d999549 100644 --- a/crates/assistant/src/slash_command/docs_command.rs +++ b/crates/assistant/src/slash_command/docs_command.rs @@ -12,7 +12,7 @@ use indexed_docs::{ DocsDotRsProvider, IndexedDocsRegistry, IndexedDocsStore, LocalRustdocProvider, PackageName, ProviderId, }; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use project::{Project, ProjectPath}; use ui::prelude::*; use util::{maybe, ResultExt}; @@ -269,6 +269,8 @@ impl SlashCommand for DocsSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -349,6 +351,7 @@ impl SlashCommand for DocsSlashCommand { range, icon: IconName::FileDoc, label: format!("docs ({provider}): {key}",).into(), + metadata: None, }) .collect(), run_commands_in_text: false, diff --git a/crates/assistant/src/slash_command/fetch_command.rs b/crates/assistant/src/slash_command/fetch_command.rs index 8ecb6de7590663..3a01bb645a36bb 100644 --- a/crates/assistant/src/slash_command/fetch_command.rs +++ b/crates/assistant/src/slash_command/fetch_command.rs @@ 
-11,7 +11,7 @@ use futures::AsyncReadExt; use gpui::{Task, WeakView}; use html_to_markdown::{convert_html_to_markdown, markdown, TagHandler}; use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use ui::prelude::*; use workspace::Workspace; @@ -104,11 +104,11 @@ impl SlashCommand for FetchSlashCommand { } fn description(&self) -> String { - "insert URL contents".into() + "Insert fetched URL contents".into() } fn menu_text(&self) -> String { - "Insert fetched URL contents".into() + self.description() } fn requires_argument(&self) -> bool { @@ -128,6 +128,8 @@ impl SlashCommand for FetchSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -161,6 +163,7 @@ impl SlashCommand for FetchSlashCommand { range, icon: IconName::AtSign, label: format!("fetch {}", url).into(), + metadata: None, }], run_commands_in_text: false, }) diff --git a/crates/assistant/src/slash_command/file_command.rs b/crates/assistant/src/slash_command/file_command.rs index c253e5b91c0297..6da56d064178ad 100644 --- a/crates/assistant/src/slash_command/file_command.rs +++ b/crates/assistant/src/slash_command/file_command.rs @@ -1,13 +1,14 @@ -use super::{diagnostics_command::write_single_file_diagnostics, SlashCommand, SlashCommandOutput}; +use super::{diagnostics_command::collect_buffer_diagnostics, SlashCommand, SlashCommandOutput}; use anyhow::{anyhow, Context as _, Result}; use assistant_slash_command::{AfterCompletion, ArgumentCompletion, SlashCommandOutputSection}; use fuzzy::PathMatch; use gpui::{AppContext, Model, Task, View, WeakView}; use language::{BufferSnapshot, CodeLabel, HighlightId, LineEnding, LspAdapterDelegate}; use project::{PathMatchCandidateSet, Project}; +use serde::{Deserialize, Serialize}; use std::{ fmt::Write, - ops::Range, + ops::{Range, RangeInclusive}, path::{Path, PathBuf}, sync::{atomic::AtomicBool, Arc}, }; @@ -109,11 +110,11 @@ impl SlashCommand for FileSlashCommand { } fn description(&self) -> String { - "insert file".into() + "Insert file".into() } fn menu_text(&self) -> String { - "Insert File".into() + self.description() } fn requires_argument(&self) -> bool { @@ -164,11 +165,7 @@ impl SlashCommand for FileSlashCommand { Some(ArgumentCompletion { label, new_text: text, - after_completion: if path_match.is_dir { - AfterCompletion::Compose - } else { - AfterCompletion::Run - }, + after_completion: AfterCompletion::Compose, replace_previous_arguments: false, }) }) @@ -179,6 +176,8 @@ impl SlashCommand for FileSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -191,54 +190,15 @@ impl SlashCommand for FileSlashCommand { return Task::ready(Err(anyhow!("missing path"))); }; - let task = collect_files(workspace.read(cx).project().clone(), arguments, cx); - - cx.foreground_executor().spawn(async move { - let output = task.await?; - Ok(SlashCommandOutput { - text: output.completion_text, - sections: output - .files - .into_iter() - .map(|file| { - build_entry_output_section( - file.range_in_text, - Some(&file.path), - file.entry_type == EntryType::Directory, - None, - ) - }) - .collect(), - run_commands_in_text: true, - }) - }) + 
collect_files(workspace.read(cx).project().clone(), arguments, cx) } } -#[derive(Clone, Copy, PartialEq, Debug)] -enum EntryType { - File, - Directory, -} - -#[derive(Clone, PartialEq, Debug)] -struct FileCommandOutput { - completion_text: String, - files: Vec, -} - -#[derive(Clone, PartialEq, Debug)] -struct OutputFile { - range_in_text: Range, - path: PathBuf, - entry_type: EntryType, -} - fn collect_files( project: Model, glob_inputs: &[String], cx: &mut AppContext, -) -> Task> { +) -> Task> { let Ok(matchers) = glob_inputs .into_iter() .map(|glob_input| { @@ -258,8 +218,7 @@ fn collect_files( .collect::>(); cx.spawn(|mut cx| async move { - let mut text = String::new(); - let mut ranges = Vec::new(); + let mut output = SlashCommandOutput::default(); for snapshot in snapshots { let worktree_id = snapshot.id(); let mut directory_stack: Vec<(Arc, String, usize)> = Vec::new(); @@ -283,11 +242,12 @@ fn collect_files( break; } let (_, entry_name, start) = directory_stack.pop().unwrap(); - ranges.push(OutputFile { - range_in_text: start..text.len().saturating_sub(1), - path: PathBuf::from(entry_name), - entry_type: EntryType::Directory, - }); + output.sections.push(build_entry_output_section( + start..output.text.len().saturating_sub(1), + Some(&PathBuf::from(entry_name)), + true, + None, + )); } let filename = entry @@ -319,21 +279,23 @@ fn collect_files( continue; } let prefix_paths = folded_directory_names_stack.drain(..).as_slice().join("/"); - let entry_start = text.len(); + let entry_start = output.text.len(); if prefix_paths.is_empty() { if is_top_level_directory { - text.push_str(&path_including_worktree_name.to_string_lossy()); + output + .text + .push_str(&path_including_worktree_name.to_string_lossy()); is_top_level_directory = false; } else { - text.push_str(&filename); + output.text.push_str(&filename); } directory_stack.push((entry.path.clone(), filename, entry_start)); } else { let entry_name = format!("{}/{}", prefix_paths, &filename); - text.push_str(&entry_name); + output.text.push_str(&entry_name); directory_stack.push((entry.path.clone(), entry_name, entry_start)); } - text.push('\n'); + output.text.push('\n'); } else if entry.is_file() { let Some(open_buffer_task) = project_handle .update(&mut cx, |project, cx| { @@ -344,28 +306,13 @@ fn collect_files( continue; }; if let Some(buffer) = open_buffer_task.await.log_err() { - let buffer_snapshot = - cx.read_model(&buffer, |buffer, _| buffer.snapshot())?; - let prev_len = text.len(); - collect_file_content( - &mut text, - &buffer_snapshot, - path_including_worktree_name.to_string_lossy().to_string(), - ); - text.push('\n'); - if !write_single_file_diagnostics( - &mut text, + let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?; + append_buffer_to_output( + &snapshot, Some(&path_including_worktree_name), - &buffer_snapshot, - ) { - text.pop(); - } - ranges.push(OutputFile { - range_in_text: prev_len..text.len(), - path: path_including_worktree_name, - entry_type: EntryType::File, - }); - text.push('\n'); + &mut output, + ) + .log_err(); } } } @@ -375,43 +322,30 @@ fn collect_files( let mut root_path = PathBuf::new(); root_path.push(snapshot.root_name()); root_path.push(&dir); - ranges.push(OutputFile { - range_in_text: start..text.len(), - path: root_path, - entry_type: EntryType::Directory, - }); + output.sections.push(build_entry_output_section( + start..output.text.len(), + Some(&root_path), + true, + None, + )); } else { - ranges.push(OutputFile { - range_in_text: start..text.len(), - path: 
PathBuf::from(entry.as_str()), - entry_type: EntryType::Directory, - }); + output.sections.push(build_entry_output_section( + start..output.text.len(), + Some(&PathBuf::from(entry.as_str())), + true, + None, + )); } } } - Ok(FileCommandOutput { - completion_text: text, - files: ranges, - }) + Ok(output) }) } -fn collect_file_content(buffer: &mut String, snapshot: &BufferSnapshot, filename: String) { - let mut content = snapshot.text(); - LineEnding::normalize(&mut content); - buffer.reserve(filename.len() + content.len() + 9); - buffer.push_str(&codeblock_fence_for_path( - Some(&PathBuf::from(filename)), - None, - )); - buffer.push_str(&content); - if !buffer.ends_with('\n') { - buffer.push('\n'); - } - buffer.push_str("```"); -} - -pub fn codeblock_fence_for_path(path: Option<&Path>, row_range: Option>) -> String { +pub fn codeblock_fence_for_path( + path: Option<&Path>, + row_range: Option>, +) -> String { let mut text = String::new(); write!(text, "```").unwrap(); @@ -426,13 +360,18 @@ pub fn codeblock_fence_for_path(path: Option<&Path>, row_range: Option, path: Option<&Path>, @@ -458,6 +397,16 @@ pub fn build_entry_output_section( range, icon, label: label.into(), + metadata: if is_directory { + None + } else { + path.and_then(|path| { + serde_json::to_value(FileCommandMetadata { + path: path.to_string_lossy().to_string(), + }) + .ok() + }) + }, } } @@ -543,6 +492,36 @@ mod custom_path_matcher { } } +pub fn append_buffer_to_output( + buffer: &BufferSnapshot, + path: Option<&Path>, + output: &mut SlashCommandOutput, +) -> Result<()> { + let prev_len = output.text.len(); + + let mut content = buffer.text(); + LineEnding::normalize(&mut content); + output.text.push_str(&codeblock_fence_for_path(path, None)); + output.text.push_str(&content); + if !output.text.ends_with('\n') { + output.text.push('\n'); + } + output.text.push_str("```"); + output.text.push('\n'); + + let section_ix = output.sections.len(); + collect_buffer_diagnostics(output, buffer, false); + + output.sections.insert( + section_ix, + build_entry_output_section(prev_len..output.text.len(), path, false, None), + ); + + output.text.push('\n'); + + Ok(()) +} + #[cfg(test)] mod test { use fs::FakeFs; @@ -595,9 +574,9 @@ mod test { .await .unwrap(); - assert!(result_1.completion_text.starts_with("root/dir")); + assert!(result_1.text.starts_with("root/dir")); // 4 files + 2 directories - assert_eq!(6, result_1.files.len()); + assert_eq!(result_1.sections.len(), 6); let result_2 = cx .update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx)) @@ -611,9 +590,9 @@ mod test { .await .unwrap(); - assert!(result.completion_text.starts_with("root/dir")); + assert!(result.text.starts_with("root/dir")); // 5 files + 2 directories - assert_eq!(7, result.files.len()); + assert_eq!(result.sections.len(), 7); // Ensure that the project lasts until after the last await drop(project); @@ -658,36 +637,27 @@ mod test { .unwrap(); // Sanity check - assert!(result.completion_text.starts_with("zed/assets/themes\n")); - assert_eq!(7, result.files.len()); + assert!(result.text.starts_with("zed/assets/themes\n")); + assert_eq!(result.sections.len(), 7); // Ensure that full file paths are included in the real output - assert!(result - .completion_text - .contains("zed/assets/themes/andromeda/LICENSE")); - assert!(result - .completion_text - .contains("zed/assets/themes/ayu/LICENSE")); - assert!(result - .completion_text - .contains("zed/assets/themes/summercamp/LICENSE")); - - assert_eq!("summercamp", 
result.files[5].path.to_string_lossy()); + assert!(result.text.contains("zed/assets/themes/andromeda/LICENSE")); + assert!(result.text.contains("zed/assets/themes/ayu/LICENSE")); + assert!(result.text.contains("zed/assets/themes/summercamp/LICENSE")); + + assert_eq!(result.sections[5].label, "summercamp"); // Ensure that things are in descending order, with properly relativized paths assert_eq!( - "zed/assets/themes/andromeda/LICENSE", - result.files[0].path.to_string_lossy() - ); - assert_eq!("andromeda", result.files[1].path.to_string_lossy()); - assert_eq!( - "zed/assets/themes/ayu/LICENSE", - result.files[2].path.to_string_lossy() + result.sections[0].label, + "zed/assets/themes/andromeda/LICENSE" ); - assert_eq!("ayu", result.files[3].path.to_string_lossy()); + assert_eq!(result.sections[1].label, "andromeda"); + assert_eq!(result.sections[2].label, "zed/assets/themes/ayu/LICENSE"); + assert_eq!(result.sections[3].label, "ayu"); assert_eq!( - "zed/assets/themes/summercamp/LICENSE", - result.files[4].path.to_string_lossy() + result.sections[4].label, + "zed/assets/themes/summercamp/LICENSE" ); // Ensure that the project lasts until after the last await @@ -727,27 +697,24 @@ mod test { .await .unwrap(); - assert!(result.completion_text.starts_with("zed/assets/themes\n")); - assert_eq!( - "zed/assets/themes/LICENSE", - result.files[0].path.to_string_lossy() - ); + assert!(result.text.starts_with("zed/assets/themes\n")); + assert_eq!(result.sections[0].label, "zed/assets/themes/LICENSE"); assert_eq!( - "zed/assets/themes/summercamp/LICENSE", - result.files[1].path.to_string_lossy() + result.sections[1].label, + "zed/assets/themes/summercamp/LICENSE" ); assert_eq!( - "zed/assets/themes/summercamp/subdir/LICENSE", - result.files[2].path.to_string_lossy() + result.sections[2].label, + "zed/assets/themes/summercamp/subdir/LICENSE" ); assert_eq!( - "zed/assets/themes/summercamp/subdir/subsubdir/LICENSE", - result.files[3].path.to_string_lossy() + result.sections[3].label, + "zed/assets/themes/summercamp/subdir/subsubdir/LICENSE" ); - assert_eq!("subsubdir", result.files[4].path.to_string_lossy()); - assert_eq!("subdir", result.files[5].path.to_string_lossy()); - assert_eq!("summercamp", result.files[6].path.to_string_lossy()); - assert_eq!("zed/assets/themes", result.files[7].path.to_string_lossy()); + assert_eq!(result.sections[4].label, "subsubdir"); + assert_eq!(result.sections[5].label, "subdir"); + assert_eq!(result.sections[6].label, "summercamp"); + assert_eq!(result.sections[7].label, "zed/assets/themes"); // Ensure that the project lasts until after the last await drop(project); diff --git a/crates/assistant/src/slash_command/now_command.rs b/crates/assistant/src/slash_command/now_command.rs index eb6277a7d92561..221ba05cafc623 100644 --- a/crates/assistant/src/slash_command/now_command.rs +++ b/crates/assistant/src/slash_command/now_command.rs @@ -7,7 +7,7 @@ use assistant_slash_command::{ }; use chrono::Local; use gpui::{Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use ui::prelude::*; use workspace::Workspace; @@ -19,11 +19,11 @@ impl SlashCommand for NowSlashCommand { } fn description(&self) -> String { - "insert the current date and time".into() + "Insert current date and time".into() } fn menu_text(&self) -> String { - "Insert Current Date and Time".into() + self.description() } fn requires_argument(&self) -> bool { @@ -43,6 +43,8 @@ impl SlashCommand for NowSlashCommand { fn run( self: Arc, _arguments: &[String], + 
_context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, _cx: &mut WindowContext, @@ -57,6 +59,7 @@ impl SlashCommand for NowSlashCommand { range, icon: IconName::CountdownTimer, label: now.to_rfc2822().into(), + metadata: None, }], run_commands_in_text: false, })) diff --git a/crates/assistant/src/slash_command/project_command.rs b/crates/assistant/src/slash_command/project_command.rs index 8182734e7214f8..58fef8f338771d 100644 --- a/crates/assistant/src/slash_command/project_command.rs +++ b/crates/assistant/src/slash_command/project_command.rs @@ -1,90 +1,39 @@ -use super::{SlashCommand, SlashCommandOutput}; -use anyhow::{anyhow, Context, Result}; +use super::{ + create_label_for_command, search_command::add_search_result_section, SlashCommand, + SlashCommandOutput, +}; +use crate::PromptBuilder; +use anyhow::{anyhow, Result}; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; -use fs::Fs; -use gpui::{AppContext, Model, Task, WeakView}; -use language::LspAdapterDelegate; -use project::{Project, ProjectPath}; +use feature_flags::FeatureFlag; +use gpui::{AppContext, Task, WeakView, WindowContext}; +use language::{Anchor, CodeLabel, LspAdapterDelegate}; +use language_model::{LanguageModelRegistry, LanguageModelTool}; +use schemars::JsonSchema; +use semantic_index::SemanticDb; +use serde::Deserialize; + +pub struct ProjectSlashCommandFeatureFlag; + +impl FeatureFlag for ProjectSlashCommandFeatureFlag { + const NAME: &'static str = "project-slash-command"; +} + use std::{ - fmt::Write, - path::Path, + fmt::Write as _, + ops::DerefMut, sync::{atomic::AtomicBool, Arc}, }; -use ui::prelude::*; +use ui::{BorrowAppContext as _, IconName}; use workspace::Workspace; -pub(crate) struct ProjectSlashCommand; +pub struct ProjectSlashCommand { + prompt_builder: Arc, +} impl ProjectSlashCommand { - async fn build_message(fs: Arc, path_to_cargo_toml: &Path) -> Result { - let buffer = fs.load(path_to_cargo_toml).await?; - let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?; - - let mut message = String::new(); - writeln!(message, "You are in a Rust project.")?; - - if let Some(workspace) = cargo_toml.workspace { - writeln!( - message, - "The project is a Cargo workspace with the following members:" - )?; - for member in workspace.members { - writeln!(message, "- {member}")?; - } - - if !workspace.default_members.is_empty() { - writeln!(message, "The default members are:")?; - for member in workspace.default_members { - writeln!(message, "- {member}")?; - } - } - - if !workspace.dependencies.is_empty() { - writeln!( - message, - "The following workspace dependencies are installed:" - )?; - for dependency in workspace.dependencies.keys() { - writeln!(message, "- {dependency}")?; - } - } - } else if let Some(package) = cargo_toml.package { - writeln!( - message, - "The project name is \"{name}\".", - name = package.name - )?; - - let description = package - .description - .as_ref() - .and_then(|description| description.get().ok().cloned()); - if let Some(description) = description.as_ref() { - writeln!(message, "It describes itself as \"{description}\".")?; - } - - if !cargo_toml.dependencies.is_empty() { - writeln!(message, "The following dependencies are installed:")?; - for dependency in cargo_toml.dependencies.keys() { - writeln!(message, "- {dependency}")?; - } - } - } - - Ok(message) - } - - fn path_to_cargo_toml(project: Model, cx: &mut AppContext) -> Option> { - let worktree 
= project.read(cx).worktrees(cx).next()?; - let worktree = worktree.read(cx); - let entry = worktree.entry_for_path("Cargo.toml")?; - let path = ProjectPath { - worktree_id: worktree.id(), - path: entry.path.clone(), - }; - Some(Arc::from( - project.read(cx).absolute_path(&path, cx)?.as_path(), - )) + pub fn new(prompt_builder: Arc) -> Self { + Self { prompt_builder } } } @@ -93,12 +42,20 @@ impl SlashCommand for ProjectSlashCommand { "project".into() } + fn label(&self, cx: &AppContext) -> CodeLabel { + create_label_for_command("project", &[], cx) + } + fn description(&self) -> String { - "insert project metadata".into() + "Generate a semantic search based on context".into() } fn menu_text(&self) -> String { - "Insert Project Metadata".into() + self.description() + } + + fn requires_argument(&self) -> bool { + false } fn complete_argument( @@ -108,43 +65,126 @@ impl SlashCommand for ProjectSlashCommand { _workspace: Option>, _cx: &mut WindowContext, ) -> Task>> { - Task::ready(Err(anyhow!("this command does not require argument"))) - } - - fn requires_argument(&self) -> bool { - false + Task::ready(Ok(Vec::new())) } fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + context_buffer: language::BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, ) -> Task> { - let output = workspace.update(cx, |workspace, cx| { - let project = workspace.project().clone(); - let fs = workspace.project().read(cx).fs().clone(); - let path = Self::path_to_cargo_toml(project, cx); - let output = cx.background_executor().spawn(async move { - let path = path.with_context(|| "Cargo.toml not found")?; - Self::build_message(fs, &path).await - }); - - cx.foreground_executor().spawn(async move { - let text = output.await?; - let range = 0..text.len(); - Ok(SlashCommandOutput { - text, - sections: vec![SlashCommandOutputSection { - range, - icon: IconName::FileTree, - label: "Project".into(), - }], - run_commands_in_text: false, + let model_registry = LanguageModelRegistry::read_global(cx); + let current_model = model_registry.active_model(); + let prompt_builder = self.prompt_builder.clone(); + + let Some(workspace) = workspace.upgrade() else { + return Task::ready(Err(anyhow::anyhow!("workspace was dropped"))); + }; + let project = workspace.read(cx).project().clone(); + let fs = project.read(cx).fs().clone(); + let Some(project_index) = + cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx)) + else { + return Task::ready(Err(anyhow::anyhow!("no project indexer"))); + }; + + cx.spawn(|mut cx| async move { + let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?; + + let prompt = + prompt_builder.generate_project_slash_command_prompt(context_buffer.text())?; + + let search_queries = current_model + .use_tool::( + language_model::LanguageModelRequest { + messages: vec![language_model::LanguageModelRequestMessage { + role: language_model::Role::User, + content: vec![language_model::MessageContent::Text(prompt)], + cache: false, + }], + tools: vec![], + stop: vec![], + temperature: None, + }, + cx.deref_mut(), + ) + .await? + .search_queries; + + let results = project_index + .read_with(&cx, |project_index, cx| { + project_index.search(search_queries.clone(), 25, cx) + })? 
+ .await?; + + let results = SemanticDb::load_results(results, &fs, &cx).await?; + + cx.background_executor() + .spawn(async move { + let mut output = "Project context:\n".to_string(); + let mut sections = Vec::new(); + + for (ix, query) in search_queries.into_iter().enumerate() { + let start_ix = output.len(); + writeln!(&mut output, "Results for {query}:").unwrap(); + let mut has_results = false; + for result in &results { + if result.query_index == ix { + add_search_result_section(result, &mut output, &mut sections); + has_results = true; + } + } + if has_results { + sections.push(SlashCommandOutputSection { + range: start_ix..output.len(), + icon: IconName::MagnifyingGlass, + label: query.into(), + metadata: None, + }); + output.push('\n'); + } else { + output.truncate(start_ix); + } + } + + sections.push(SlashCommandOutputSection { + range: 0..output.len(), + icon: IconName::Book, + label: "Project context".into(), + metadata: None, + }); + + Ok(SlashCommandOutput { + text: output, + sections, + run_commands_in_text: true, + }) }) - }) - }); - output.unwrap_or_else(|error| Task::ready(Err(error))) + .await + }) + } +} + +#[derive(JsonSchema, Deserialize)] +struct SearchQueries { + /// An array of semantic search queries. + /// + /// These queries will be used to search the user's codebase. + /// The function can only accept 4 queries, otherwise it will error. + /// As such, it's important that you limit the length of the search_queries array to 5 queries or less. + search_queries: Vec, +} + +impl LanguageModelTool for SearchQueries { + fn name() -> String { + "search_queries".to_string() + } + + fn description() -> String { + "Generate semantic search queries based on context".to_string() } } diff --git a/crates/assistant/src/slash_command/prompt_after_summary.txt b/crates/assistant/src/slash_command/prompt_after_summary.txt new file mode 100644 index 00000000000000..fc139a1fcb0c15 --- /dev/null +++ b/crates/assistant/src/slash_command/prompt_after_summary.txt @@ -0,0 +1,24 @@ +Actions have a cost, so only include actions that you think +will be helpful to you in doing a great job answering the +prompt in the future. + +You must respond ONLY with a list of actions you would like to +perform. Each action should be on its own line, and followed by a space and then its parameter. + +Actions can be performed more than once with different parameters. +Here is an example valid response: + +``` +file path/to/my/file.txt +file path/to/another/file.txt +search something to search for +search something else to search for +``` + +Once again, do not forget: you must respond ONLY in the format of +one action per line, and the action name should be followed by +its parameter. Your response must not include anything other +than a list of actions, with one action per line, in this format. +It is extremely important that you do not deviate from this format even slightly! + +This is the end of my instructions for how to respond. The rest is the prompt: diff --git a/crates/assistant/src/slash_command/prompt_before_summary.txt b/crates/assistant/src/slash_command/prompt_before_summary.txt new file mode 100644 index 00000000000000..5d8db1b8f7903f --- /dev/null +++ b/crates/assistant/src/slash_command/prompt_before_summary.txt @@ -0,0 +1,31 @@ +I'm going to give you a prompt. I don't want you to respond +to the prompt itself. I want you to figure out which of the following +actions on my project, if any, would help you answer the prompt. 
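(Aside, not part of the prompt text itself: the "action parameter" lines that prompt_after_summary.txt above asks the model to produce are consumed by the /auto command roughly as sketched below — the action name is everything before the first space, the parameter is the rest, and names outside the supported set are dropped before a leading slash is added on replay. This is a hedged sketch of that parsing; `parse_action_line` is a hypothetical name, and the real implementation in auto_command.rs also streams and deduplicates results.)

fn parse_action_line(line: &str) -> Option<(String, String)> {
    // Only these action names are recognized by the context-inference step.
    let supported = ["search", "file"];
    let first_space = line.find(' ')?;
    let (name, arg) = (line[..first_space].trim(), line[first_space..].trim());
    if supported.contains(&name) {
        Some((name.to_string(), arg.to_string()))
    } else {
        None
    }
}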
+ +Here are the actions: + +## file + +This action's parameter is a file path to one of the files +in the project. If you ask for this action, I will tell you +the full contents of the file, so you can learn all the +details of the file. + +## search + +This action's parameter is a string to do a semantic search for +across the files in the project. (You will have a JSON summary +of all the files in the project.) It will tell you which files this string +(or similar strings; it is a semantic search) appear in, +as well as some context of the lines surrounding each result. +It's very important that you only use this action when you think +that searching across the specific files in this project for the query +in question will be useful. For example, don't use this command to search +for queries you might put into a general Web search engine, because those +will be too general to give useful results in this project-specific search. + +--- + +That was the end of the list of actions. + +Here is a JSON summary of each of the files in my project: diff --git a/crates/assistant/src/slash_command/prompt_command.rs b/crates/assistant/src/slash_command/prompt_command.rs index 4d64bba2edb740..978c6d7504caeb 100644 --- a/crates/assistant/src/slash_command/prompt_command.rs +++ b/crates/assistant/src/slash_command/prompt_command.rs @@ -3,7 +3,7 @@ use crate::prompt_library::PromptStore; use anyhow::{anyhow, Context, Result}; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use gpui::{Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use std::sync::{atomic::AtomicBool, Arc}; use ui::prelude::*; use workspace::Workspace; @@ -16,11 +16,11 @@ impl SlashCommand for PromptSlashCommand { } fn description(&self) -> String { - "insert prompt from library".into() + "Insert prompt from library".into() } fn menu_text(&self) -> String { - "Insert Prompt from Library".into() + self.description() } fn requires_argument(&self) -> bool { @@ -56,6 +56,8 @@ impl SlashCommand for PromptSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -95,6 +97,7 @@ impl SlashCommand for PromptSlashCommand { range, icon: IconName::Library, label: title, + metadata: None, }], run_commands_in_text: true, }) diff --git a/crates/assistant/src/slash_command/search_command.rs b/crates/assistant/src/slash_command/search_command.rs index 4da8a5585f3824..c7183e95bbc853 100644 --- a/crates/assistant/src/slash_command/search_command.rs +++ b/crates/assistant/src/slash_command/search_command.rs @@ -7,15 +7,13 @@ use anyhow::Result; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use feature_flags::FeatureFlag; use gpui::{AppContext, Task, WeakView}; -use language::{CodeLabel, LineEnding, LspAdapterDelegate}; -use semantic_index::SemanticIndex; +use language::{CodeLabel, LspAdapterDelegate}; +use semantic_index::{LoadedSearchResult, SemanticDb}; use std::{ fmt::Write, - path::PathBuf, sync::{atomic::AtomicBool, Arc}, }; use ui::{prelude::*, IconName}; -use util::ResultExt; use workspace::Workspace; pub(crate) struct SearchSlashCommandFeatureFlag; @@ -36,11 +34,11 @@ impl SlashCommand for SearchSlashCommand { } fn description(&self) -> String { - "semantic search".into() + "Search your project semantically".into() } fn menu_text(&self) -> String { - "Semantic 
Search".into() + self.description() } fn requires_argument(&self) -> bool { @@ -60,6 +58,8 @@ impl SlashCommand for SearchSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: language::BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -92,72 +92,28 @@ impl SlashCommand for SearchSlashCommand { let project = workspace.read(cx).project().clone(); let fs = project.read(cx).fs().clone(); - let project_index = - cx.update_global(|index: &mut SemanticIndex, cx| index.project_index(project, cx)); + let Some(project_index) = + cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx)) + else { + return Task::ready(Err(anyhow::anyhow!("no project indexer"))); + }; cx.spawn(|cx| async move { let results = project_index .read_with(&cx, |project_index, cx| { - project_index.search(query.clone(), limit.unwrap_or(5), cx) + project_index.search(vec![query.clone()], limit.unwrap_or(5), cx) })? .await?; - let mut loaded_results = Vec::new(); - for result in results { - let (full_path, file_content) = - result.worktree.read_with(&cx, |worktree, _cx| { - let entry_abs_path = worktree.abs_path().join(&result.path); - let mut entry_full_path = PathBuf::from(worktree.root_name()); - entry_full_path.push(&result.path); - let file_content = async { - let entry_abs_path = entry_abs_path; - fs.load(&entry_abs_path).await - }; - (entry_full_path, file_content) - })?; - if let Some(file_content) = file_content.await.log_err() { - loaded_results.push((result, full_path, file_content)); - } - } + let loaded_results = SemanticDb::load_results(results, &fs, &cx).await?; let output = cx .background_executor() .spawn(async move { let mut text = format!("Search results for {query}:\n"); let mut sections = Vec::new(); - for (result, full_path, file_content) in loaded_results { - let range_start = result.range.start.min(file_content.len()); - let range_end = result.range.end.min(file_content.len()); - - let start_row = file_content[0..range_start].matches('\n').count() as u32; - let end_row = file_content[0..range_end].matches('\n').count() as u32; - let start_line_byte_offset = file_content[0..range_start] - .rfind('\n') - .map(|pos| pos + 1) - .unwrap_or_default(); - let end_line_byte_offset = file_content[range_end..] 
- .find('\n') - .map(|pos| range_end + pos) - .unwrap_or_else(|| file_content.len()); - - let section_start_ix = text.len(); - text.push_str(&codeblock_fence_for_path( - Some(&result.path), - Some(start_row..end_row), - )); - - let mut excerpt = - file_content[start_line_byte_offset..end_line_byte_offset].to_string(); - LineEnding::normalize(&mut excerpt); - text.push_str(&excerpt); - writeln!(text, "\n```\n").unwrap(); - let section_end_ix = text.len() - 1; - sections.push(build_entry_output_section( - section_start_ix..section_end_ix, - Some(&full_path), - false, - Some(start_row + 1..end_row + 1), - )); + for loaded_result in &loaded_results { + add_search_result_section(loaded_result, &mut text, &mut sections); } let query = SharedString::from(query); @@ -165,6 +121,7 @@ impl SlashCommand for SearchSlashCommand { range: 0..text.len(), icon: IconName::MagnifyingGlass, label: query, + metadata: None, }); SlashCommandOutput { @@ -179,3 +136,35 @@ impl SlashCommand for SearchSlashCommand { }) } } + +pub fn add_search_result_section( + loaded_result: &LoadedSearchResult, + text: &mut String, + sections: &mut Vec>, +) { + let LoadedSearchResult { + path, + full_path, + excerpt_content, + row_range, + .. + } = loaded_result; + let section_start_ix = text.len(); + text.push_str(&codeblock_fence_for_path( + Some(&path), + Some(row_range.clone()), + )); + + text.push_str(&excerpt_content); + if !text.ends_with('\n') { + text.push('\n'); + } + writeln!(text, "```\n").unwrap(); + let section_end_ix = text.len() - 1; + sections.push(build_entry_output_section( + section_start_ix..section_end_ix, + Some(&full_path), + false, + Some(row_range.start() + 1..row_range.end() + 1), + )); +} diff --git a/crates/assistant/src/slash_command/symbols_command.rs b/crates/assistant/src/slash_command/symbols_command.rs index c9582f2882472d..887b57ba9956c7 100644 --- a/crates/assistant/src/slash_command/symbols_command.rs +++ b/crates/assistant/src/slash_command/symbols_command.rs @@ -3,7 +3,7 @@ use anyhow::{anyhow, Context as _, Result}; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use editor::Editor; use gpui::{Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use std::sync::Arc; use std::{path::Path, sync::atomic::AtomicBool}; use ui::{IconName, WindowContext}; @@ -17,11 +17,11 @@ impl SlashCommand for OutlineSlashCommand { } fn description(&self) -> String { - "insert symbols for active tab".into() + "Insert symbols for active tab".into() } fn menu_text(&self) -> String { - "Insert Symbols for Active Tab".into() + self.description() } fn complete_argument( @@ -41,6 +41,8 @@ impl SlashCommand for OutlineSlashCommand { fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -77,6 +79,7 @@ impl SlashCommand for OutlineSlashCommand { range: 0..outline_text.len(), icon: IconName::ListTree, label: path.to_string_lossy().to_string().into(), + metadata: None, }], text: outline_text, run_commands_in_text: false, diff --git a/crates/assistant/src/slash_command/tab_command.rs b/crates/assistant/src/slash_command/tab_command.rs index 1a6884b8538221..0bff4730d8e5c8 100644 --- a/crates/assistant/src/slash_command/tab_command.rs +++ b/crates/assistant/src/slash_command/tab_command.rs @@ -1,21 +1,17 @@ -use super::{ - diagnostics_command::write_single_file_diagnostics, - 
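Stepping back from the hunk for a moment: `add_search_result_section` follows the general `SlashCommandOutput` pattern used throughout this change, which is to append everything to one running string and record the byte range each excerpt occupies as a labeled section. A rough standalone sketch of that pattern follows; a plain tuple stands in for `SlashCommandOutputSection`, and a simple header line stands in for `codeblock_fence_for_path`, so this is an illustration of the shape, not the actual types.

```rust
use std::fmt::Write as _;
use std::ops::Range;

/// A stripped-down stand-in for `SlashCommandOutput`: rendered text plus the
/// byte-range "sections" that the UI later folds and labels.
#[derive(Default, Debug)]
struct Output {
    text: String,
    sections: Vec<(Range<usize>, String)>,
}

/// Roughly the shape of `add_search_result_section` above. The real helper
/// writes a markdown code fence via `codeblock_fence_for_path`; here a plain
/// header line stands in for it to keep the sketch short.
fn add_excerpt_section(output: &mut Output, path: &str, rows: Range<u32>, excerpt: &str) {
    let start = output.text.len();
    writeln!(output.text, "--- {path} (lines {}-{}) ---", rows.start + 1, rows.end + 1).unwrap();
    output.text.push_str(excerpt);
    if !output.text.ends_with('\n') {
        output.text.push('\n');
    }
    // The section ends just before the trailing blank line, as in the hunk above.
    output.text.push('\n');
    let end = output.text.len() - 1;
    output.sections.push((start..end, path.to_string()));
}

fn main() {
    let mut output = Output::default();
    add_excerpt_section(&mut output, "src/lib.rs", 10..12, "fn demo() {}\n");
    assert_eq!(output.sections.len(), 1);
    print!("{}", output.text);
}
```

The real `SlashCommandOutputSection` also carries an icon and, after this change, an optional `metadata` value, which is why every construction site in this diff gains `metadata: None`.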
file_command::{build_entry_output_section, codeblock_fence_for_path}, - SlashCommand, SlashCommandOutput, -}; +use super::{file_command::append_buffer_to_output, SlashCommand, SlashCommandOutput}; use anyhow::{Context, Result}; -use assistant_slash_command::ArgumentCompletion; +use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use collections::{HashMap, HashSet}; use editor::Editor; use futures::future::join_all; use gpui::{Entity, Task, WeakView}; use language::{BufferSnapshot, CodeLabel, HighlightId, LspAdapterDelegate}; use std::{ - fmt::Write, path::PathBuf, sync::{atomic::AtomicBool, Arc}, }; use ui::{ActiveTheme, WindowContext}; +use util::ResultExt; use workspace::Workspace; pub(crate) struct TabSlashCommand; @@ -28,11 +24,11 @@ impl SlashCommand for TabSlashCommand { } fn description(&self) -> String { - "insert open tabs (active tab by default)".to_owned() + "Insert open tabs (active tab by default)".to_owned() } fn menu_text(&self) -> String { - "Insert Open Tabs".to_owned() + self.description() } fn requires_argument(&self) -> bool { @@ -131,6 +127,8 @@ impl SlashCommand for TabSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -144,40 +142,11 @@ impl SlashCommand for TabSlashCommand { ); cx.background_executor().spawn(async move { - let mut sections = Vec::new(); - let mut text = String::new(); - let mut has_diagnostics = false; + let mut output = SlashCommandOutput::default(); for (full_path, buffer, _) in tab_items_search.await? { - let section_start_ix = text.len(); - text.push_str(&codeblock_fence_for_path(full_path.as_deref(), None)); - for chunk in buffer.as_rope().chunks() { - text.push_str(chunk); - } - if !text.ends_with('\n') { - text.push('\n'); - } - writeln!(text, "```").unwrap(); - if write_single_file_diagnostics(&mut text, full_path.as_deref(), &buffer) { - has_diagnostics = true; - } - if !text.ends_with('\n') { - text.push('\n'); - } - - let section_end_ix = text.len() - 1; - sections.push(build_entry_output_section( - section_start_ix..section_end_ix, - full_path.as_deref(), - false, - None, - )); + append_buffer_to_output(&buffer, full_path.as_deref(), &mut output).log_err(); } - - Ok(SlashCommandOutput { - text, - sections, - run_commands_in_text: has_diagnostics, - }) + Ok(output) }) } } diff --git a/crates/assistant/src/slash_command/terminal_command.rs b/crates/assistant/src/slash_command/terminal_command.rs index 04baabd39669bc..1d4959fb199572 100644 --- a/crates/assistant/src/slash_command/terminal_command.rs +++ b/crates/assistant/src/slash_command/terminal_command.rs @@ -6,7 +6,7 @@ use assistant_slash_command::{ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, }; use gpui::{AppContext, Task, View, WeakView}; -use language::{CodeLabel, LspAdapterDelegate}; +use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate}; use terminal_view::{terminal_panel::TerminalPanel, TerminalView}; use ui::prelude::*; use workspace::{dock::Panel, Workspace}; @@ -29,11 +29,11 @@ impl SlashCommand for TerminalSlashCommand { } fn description(&self) -> String { - "insert terminal output".into() + "Insert terminal output".into() } fn menu_text(&self) -> String { - "Insert Terminal Output".into() + self.description() } fn requires_argument(&self) -> bool { @@ -57,6 +57,8 @@ impl SlashCommand for TerminalSlashCommand { fn run( self: 
Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -91,6 +93,7 @@ impl SlashCommand for TerminalSlashCommand { range, icon: IconName::Terminal, label: "Terminal".into(), + metadata: None, }], run_commands_in_text: false, })) diff --git a/crates/assistant/src/slash_command/workflow_command.rs b/crates/assistant/src/slash_command/workflow_command.rs index f588fe848d5b32..50c0e6cbc60e60 100644 --- a/crates/assistant/src/slash_command/workflow_command.rs +++ b/crates/assistant/src/slash_command/workflow_command.rs @@ -8,7 +8,7 @@ use assistant_slash_command::{ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, }; use gpui::{Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use ui::prelude::*; use workspace::Workspace; @@ -18,6 +18,8 @@ pub(crate) struct WorkflowSlashCommand { } impl WorkflowSlashCommand { + pub const NAME: &'static str = "workflow"; + pub fn new(prompt_builder: Arc) -> Self { Self { prompt_builder } } @@ -25,15 +27,15 @@ impl WorkflowSlashCommand { impl SlashCommand for WorkflowSlashCommand { fn name(&self) -> String { - "workflow".into() + Self::NAME.into() } fn description(&self) -> String { - "insert a prompt that opts into the edit workflow".into() + "Insert prompt to opt into the edit workflow".into() } fn menu_text(&self) -> String { - "Insert Workflow Prompt".into() + self.description() } fn requires_argument(&self) -> bool { @@ -53,6 +55,8 @@ impl SlashCommand for WorkflowSlashCommand { fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -68,6 +72,7 @@ impl SlashCommand for WorkflowSlashCommand { range, icon: IconName::Route, label: "Workflow".into(), + metadata: None, }], run_commands_in_text: false, }) diff --git a/crates/assistant/src/slash_command_picker.rs b/crates/assistant/src/slash_command_picker.rs index 4b57dcfb3306c5..58023848b0e508 100644 --- a/crates/assistant/src/slash_command_picker.rs +++ b/crates/assistant/src/slash_command_picker.rs @@ -184,7 +184,7 @@ impl PickerDelegate for SlashCommandDelegate { h_flex() .group(format!("command-entry-label-{ix}")) .w_full() - .min_w(px(220.)) + .min_w(px(250.)) .child( v_flex() .child( @@ -203,7 +203,9 @@ impl PickerDelegate for SlashCommandDelegate { div() .font_buffer(cx) .child( - Label::new(args).size(LabelSize::Small), + Label::new(args) + .size(LabelSize::Small) + .color(Color::Muted), ) .visible_on_hover(format!( "command-entry-label-{ix}" diff --git a/crates/assistant/src/slash_command_settings.rs b/crates/assistant/src/slash_command_settings.rs index eda950b6a222cf..5918769d711c3f 100644 --- a/crates/assistant/src/slash_command_settings.rs +++ b/crates/assistant/src/slash_command_settings.rs @@ -10,9 +10,9 @@ pub struct SlashCommandSettings { /// Settings for the `/docs` slash command. #[serde(default)] pub docs: DocsCommandSettings, - /// Settings for the `/project` slash command. + /// Settings for the `/cargo-workspace` slash command. #[serde(default)] - pub project: ProjectCommandSettings, + pub cargo_workspace: CargoWorkspaceCommandSettings, } /// Settings for the `/docs` slash command. @@ -23,10 +23,10 @@ pub struct DocsCommandSettings { pub enabled: bool, } -/// Settings for the `/project` slash command. 
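Because the settings key changes along with the command rename, here is a hedged standalone sketch of the resulting deserialization behavior. The types below are simplified mirrors of `SlashCommandSettings` (both sub-settings collapsed into one `enabled` shape): an old `project` key is silently ignored, while the new `cargo_workspace` key is honored, with `#[serde(default)]` keeping everything off unless set.

```rust
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};

/// Simplified mirror of the renamed settings from the hunk above.
#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
struct SlashCommandSettings {
    #[serde(default)]
    docs: CommandEnabled,
    /// The key users set is now `cargo_workspace` rather than `project`.
    #[serde(default)]
    cargo_workspace: CommandEnabled,
}

#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
struct CommandEnabled {
    #[serde(default)]
    enabled: bool,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Old-style input: the unknown `project` key is ignored, so the renamed
    // command stays disabled by default.
    let old = r#"{ "project": { "enabled": true } }"#;
    let parsed: SlashCommandSettings = serde_json::from_str(old)?;
    assert!(!parsed.cargo_workspace.enabled);

    // New-style input enables the renamed `/cargo-workspace` command.
    let new = r#"{ "cargo_workspace": { "enabled": true } }"#;
    let parsed: SlashCommandSettings = serde_json::from_str(new)?;
    assert!(parsed.cargo_workspace.enabled);
    println!("{parsed:?}");
    Ok(())
}
```

Whether existing user settings are migrated automatically is not shown in this diff, so the sketch assumes the old key is simply ignored.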
+/// Settings for the `/cargo-workspace` slash command. #[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)] -pub struct ProjectCommandSettings { - /// Whether `/project` is enabled. +pub struct CargoWorkspaceCommandSettings { + /// Whether `/cargo-workspace` is enabled. #[serde(default)] pub enabled: bool, } @@ -38,7 +38,10 @@ impl Settings for SlashCommandSettings { fn load(sources: SettingsSources, _cx: &mut AppContext) -> Result { SettingsSources::::json_merge_with( - [sources.default].into_iter().chain(sources.user), + [sources.default] + .into_iter() + .chain(sources.user) + .chain(sources.server), ) } } diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index 479925b060633e..41b8d9eb88ac25 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ b/crates/assistant/src/terminal_inline_assistant.rs @@ -25,6 +25,7 @@ use std::{ sync::Arc, time::{Duration, Instant}, }; +use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase}; use terminal::Terminal; use terminal_view::TerminalView; use theme::ThemeSettings; @@ -284,7 +285,7 @@ impl TerminalInlineAssistant { messages, tools: Vec::new(), stop: Vec::new(), - temperature: 1.0, + temperature: None, }) } @@ -413,7 +414,7 @@ impl TerminalInlineAssist { struct InlineAssistantError; let id = - NotificationId::identified::( + NotificationId::composite::( assist_id.0, ); @@ -465,7 +466,8 @@ impl EventEmitter for PromptEditor {} impl Render for PromptEditor { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { - let buttons = match &self.codegen.read(cx).status { + let status = &self.codegen.read(cx).status; + let buttons = match status { CodegenStatus::Idle => { vec![ IconButton::new("cancel", IconName::Close) @@ -516,7 +518,8 @@ impl Render for PromptEditor { .tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx)) .on_click(cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested))); - if self.edited_since_done { + let has_error = matches!(status, CodegenStatus::Error(_)); + if has_error || self.edited_since_done { vec![ cancel, IconButton::new("restart", IconName::RotateCw) @@ -568,7 +571,7 @@ impl Render for PromptEditor { .bg(cx.theme().colors().editor_background) .border_y_1() .border_color(cx.theme().status().info_border) - .py_1p5() + .py_2() .h_full() .w_full() .on_action(cx.listener(Self::confirm)) @@ -583,7 +586,7 @@ impl Render for PromptEditor { .gap_2() .child(ModelSelector::new( self.fs.clone(), - IconButton::new("context", IconName::SlidersAlt) + IconButton::new("context", IconName::SettingsAlt) .shape(IconButtonShape::Square) .icon_size(IconSize::Small) .icon_color(Color::Muted) @@ -947,12 +950,11 @@ impl PromptEditor { } else { cx.theme().colors().text }, - font_family: settings.ui_font.family.clone(), - font_features: settings.ui_font.features.clone(), - font_fallbacks: settings.ui_font.fallbacks.clone(), - font_size: rems(0.875).into(), - font_weight: settings.ui_font.weight, - line_height: relative(1.3), + font_family: settings.buffer_font.family.clone(), + font_fallbacks: settings.buffer_font.fallbacks.clone(), + font_size: settings.buffer_font_size.into(), + font_weight: settings.buffer_font.weight, + line_height: relative(settings.buffer_line_height.value()), ..Default::default() }; EditorElement::new( @@ -988,7 +990,7 @@ impl TerminalTransaction { pub fn push(&mut self, hunk: String, cx: &mut AppContext) { // Ensure that the assistant cannot accidentally execute commands that are 
streamed into the terminal - let input = hunk.replace(CARRIAGE_RETURN, " "); + let input = Self::sanitize_input(hunk); self.terminal .update(cx, |terminal, _| terminal.input(input)); } @@ -1003,6 +1005,10 @@ impl TerminalTransaction { terminal.input(CARRIAGE_RETURN.to_string()) }); } + + fn sanitize_input(input: String) -> String { + input.replace(['\r', '\n'], "") + } } pub struct Codegen { @@ -1034,6 +1040,7 @@ impl Codegen { self.transaction = Some(TerminalTransaction::start(self.terminal.clone())); self.generation = cx.spawn(|this, mut cx| async move { let model_telemetry_id = model.telemetry_id(); + let model_provider_id = model.provider_id(); let response = model.stream_completion_text(prompt, &cx).await; let generate = async { let (mut hunks_tx, mut hunks_rx) = mpsc::channel(1); @@ -1058,13 +1065,16 @@ impl Codegen { let error_message = result.as_ref().err().map(|error| error.to_string()); if let Some(telemetry) = telemetry { - telemetry.report_assistant_event( - None, - telemetry_events::AssistantKind::Inline, - model_telemetry_id, + telemetry.report_assistant_event(AssistantEvent { + conversation_id: None, + kind: AssistantKind::Inline, + phase: AssistantPhase::Response, + model: model_telemetry_id, + model_provider: model_provider_id.to_string(), response_latency, error_message, - ); + language_name: None, + }); } result?; diff --git a/crates/assistant/src/workflow.rs b/crates/assistant/src/workflow.rs deleted file mode 100644 index 75c65ed0a78e42..00000000000000 --- a/crates/assistant/src/workflow.rs +++ /dev/null @@ -1,506 +0,0 @@ -use crate::{AssistantPanel, InlineAssistId, InlineAssistant}; -use anyhow::{anyhow, Context as _, Result}; -use collections::HashMap; -use editor::Editor; -use gpui::AsyncAppContext; -use gpui::{Model, Task, UpdateGlobal as _, View, WeakView, WindowContext}; -use language::{Buffer, BufferSnapshot}; -use project::{Project, ProjectPath}; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use std::{ops::Range, path::Path, sync::Arc}; -use text::Bias; -use workspace::Workspace; - -#[derive(Debug)] -pub(crate) struct WorkflowStep { - pub range: Range, - pub leading_tags_end: text::Anchor, - pub trailing_tag_start: Option, - pub edits: Arc<[Result]>, - pub resolution_task: Option>, - pub resolution: Option>>, -} - -#[derive(Clone, Debug, PartialEq, Eq)] -pub(crate) struct WorkflowStepEdit { - pub path: String, - pub kind: WorkflowStepEditKind, -} - -#[derive(Clone, Debug, Eq, PartialEq)] -pub(crate) struct WorkflowStepResolution { - pub title: String, - pub suggestion_groups: HashMap, Vec>, -} - -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct WorkflowSuggestionGroup { - pub context_range: Range, - pub suggestions: Vec, -} - -#[derive(Clone, Debug, Eq, PartialEq)] -pub enum WorkflowSuggestion { - Update { - range: Range, - description: String, - }, - CreateFile { - description: String, - }, - InsertBefore { - position: language::Anchor, - description: String, - }, - InsertAfter { - position: language::Anchor, - description: String, - }, - Delete { - range: Range, - }, -} - -impl WorkflowSuggestion { - pub fn range(&self) -> Range { - match self { - Self::Update { range, .. } => range.clone(), - Self::CreateFile { .. } => language::Anchor::MIN..language::Anchor::MAX, - Self::InsertBefore { position, .. } | Self::InsertAfter { position, .. } => { - *position..*position - } - Self::Delete { range, .. } => range.clone(), - } - } - - pub fn description(&self) -> Option<&str> { - match self { - Self::Update { description, .. 
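The `sanitize_input` change above is small but security-relevant: stripping both `\r` and `\n`, rather than replacing carriage returns with spaces as before, means a streamed completion can never submit a command on its own; as the surrounding code suggests, a carriage return is only sent to the terminal when the transaction is explicitly confirmed. A self-contained restatement:

```rust
/// Mirrors `TerminalTransaction::sanitize_input` from the hunk above.
fn sanitize_input(input: String) -> String {
    input.replace(['\r', '\n'], "")
}

fn main() {
    let streamed_hunk = "rm -rf /tmp/scratch\r\n".to_string();
    // The line ending that would have executed the command is gone; the text
    // only runs later, when the user confirms and a carriage return is sent.
    assert_eq!(sanitize_input(streamed_hunk), "rm -rf /tmp/scratch");
}
```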
} - | Self::CreateFile { description } - | Self::InsertBefore { description, .. } - | Self::InsertAfter { description, .. } => Some(description), - Self::Delete { .. } => None, - } - } - - fn description_mut(&mut self) -> Option<&mut String> { - match self { - Self::Update { description, .. } - | Self::CreateFile { description } - | Self::InsertBefore { description, .. } - | Self::InsertAfter { description, .. } => Some(description), - Self::Delete { .. } => None, - } - } - - pub fn try_merge(&mut self, other: &Self, buffer: &BufferSnapshot) -> bool { - let range = self.range(); - let other_range = other.range(); - - // Don't merge if we don't contain the other suggestion. - if range.start.cmp(&other_range.start, buffer).is_gt() - || range.end.cmp(&other_range.end, buffer).is_lt() - { - return false; - } - - if let Some(description) = self.description_mut() { - if let Some(other_description) = other.description() { - description.push('\n'); - description.push_str(other_description); - } - } - true - } - - pub fn show( - &self, - editor: &View, - excerpt_id: editor::ExcerptId, - workspace: &WeakView, - assistant_panel: &View, - cx: &mut WindowContext, - ) -> Option { - let mut initial_transaction_id = None; - let initial_prompt; - let suggestion_range; - let buffer = editor.read(cx).buffer().clone(); - let snapshot = buffer.read(cx).snapshot(cx); - - match self { - Self::Update { - range, description, .. - } => { - initial_prompt = description.clone(); - suggestion_range = snapshot.anchor_in_excerpt(excerpt_id, range.start)? - ..snapshot.anchor_in_excerpt(excerpt_id, range.end)?; - } - Self::CreateFile { description } => { - initial_prompt = description.clone(); - suggestion_range = editor::Anchor::min()..editor::Anchor::min(); - } - Self::InsertBefore { - position, - description, - .. - } => { - let position = snapshot.anchor_in_excerpt(excerpt_id, *position)?; - initial_prompt = description.clone(); - suggestion_range = buffer.update(cx, |buffer, cx| { - buffer.start_transaction(cx); - let line_start = buffer.insert_empty_line(position, true, true, cx); - initial_transaction_id = buffer.end_transaction(cx); - buffer.refresh_preview(cx); - - let line_start = buffer.read(cx).anchor_before(line_start); - line_start..line_start - }); - } - Self::InsertAfter { - position, - description, - .. - } => { - let position = snapshot.anchor_in_excerpt(excerpt_id, *position)?; - initial_prompt = description.clone(); - suggestion_range = buffer.update(cx, |buffer, cx| { - buffer.start_transaction(cx); - let line_start = buffer.insert_empty_line(position, true, true, cx); - initial_transaction_id = buffer.end_transaction(cx); - buffer.refresh_preview(cx); - - let line_start = buffer.read(cx).anchor_before(line_start); - line_start..line_start - }); - } - Self::Delete { range, .. } => { - initial_prompt = "Delete".to_string(); - suggestion_range = snapshot.anchor_in_excerpt(excerpt_id, range.start)? 
- ..snapshot.anchor_in_excerpt(excerpt_id, range.end)?; - } - } - - InlineAssistant::update_global(cx, |inline_assistant, cx| { - Some(inline_assistant.suggest_assist( - editor, - suggestion_range, - initial_prompt, - initial_transaction_id, - Some(workspace.clone()), - Some(assistant_panel), - cx, - )) - }) - } -} - -impl WorkflowStepEdit { - pub fn new( - path: Option, - operation: Option, - search: Option, - description: Option, - ) -> Result { - let path = path.ok_or_else(|| anyhow!("missing path"))?; - let operation = operation.ok_or_else(|| anyhow!("missing operation"))?; - - let kind = match operation.as_str() { - "update" => WorkflowStepEditKind::Update { - search: search.ok_or_else(|| anyhow!("missing search"))?, - description: description.ok_or_else(|| anyhow!("missing description"))?, - }, - "insert_before" => WorkflowStepEditKind::InsertBefore { - search: search.ok_or_else(|| anyhow!("missing search"))?, - description: description.ok_or_else(|| anyhow!("missing description"))?, - }, - "insert_after" => WorkflowStepEditKind::InsertAfter { - search: search.ok_or_else(|| anyhow!("missing search"))?, - description: description.ok_or_else(|| anyhow!("missing description"))?, - }, - "delete" => WorkflowStepEditKind::Delete { - search: search.ok_or_else(|| anyhow!("missing search"))?, - }, - "create" => WorkflowStepEditKind::Create { - description: description.ok_or_else(|| anyhow!("missing description"))?, - }, - _ => Err(anyhow!("unknown operation {operation:?}"))?, - }; - - Ok(Self { path, kind }) - } - - pub async fn resolve( - &self, - project: Model, - mut cx: AsyncAppContext, - ) -> Result<(Model, super::WorkflowSuggestion)> { - let path = self.path.clone(); - let kind = self.kind.clone(); - let buffer = project - .update(&mut cx, |project, cx| { - let project_path = project - .find_project_path(Path::new(&path), cx) - .or_else(|| { - // If we couldn't find a project path for it, put it in the active worktree - // so that when we create the buffer, it can be saved. - let worktree = project - .active_entry() - .and_then(|entry_id| project.worktree_for_entry(entry_id, cx)) - .or_else(|| project.worktrees(cx).next())?; - let worktree = worktree.read(cx); - - Some(ProjectPath { - worktree_id: worktree.id(), - path: Arc::from(Path::new(&path)), - }) - }) - .with_context(|| format!("worktree not found for {:?}", path))?; - anyhow::Ok(project.open_buffer(project_path, cx)) - })?? 
- .await?; - - let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?; - let suggestion = cx - .background_executor() - .spawn(async move { - match kind { - WorkflowStepEditKind::Update { - search, - description, - } => { - let range = Self::resolve_location(&snapshot, &search); - WorkflowSuggestion::Update { range, description } - } - WorkflowStepEditKind::Create { description } => { - WorkflowSuggestion::CreateFile { description } - } - WorkflowStepEditKind::InsertBefore { - search, - description, - } => { - let range = Self::resolve_location(&snapshot, &search); - WorkflowSuggestion::InsertBefore { - position: range.start, - description, - } - } - WorkflowStepEditKind::InsertAfter { - search, - description, - } => { - let range = Self::resolve_location(&snapshot, &search); - WorkflowSuggestion::InsertAfter { - position: range.end, - description, - } - } - WorkflowStepEditKind::Delete { search } => { - let range = Self::resolve_location(&snapshot, &search); - WorkflowSuggestion::Delete { range } - } - } - }) - .await; - - Ok((buffer, suggestion)) - } - - fn resolve_location(buffer: &text::BufferSnapshot, search_query: &str) -> Range { - const INSERTION_SCORE: f64 = -1.0; - const DELETION_SCORE: f64 = -1.0; - const REPLACEMENT_SCORE: f64 = -1.0; - const EQUALITY_SCORE: f64 = 5.0; - - struct Matrix { - cols: usize, - data: Vec, - } - - impl Matrix { - fn new(rows: usize, cols: usize) -> Self { - Matrix { - cols, - data: vec![0.0; rows * cols], - } - } - - fn get(&self, row: usize, col: usize) -> f64 { - self.data[row * self.cols + col] - } - - fn set(&mut self, row: usize, col: usize, value: f64) { - self.data[row * self.cols + col] = value; - } - } - - let buffer_len = buffer.len(); - let query_len = search_query.len(); - let mut matrix = Matrix::new(query_len + 1, buffer_len + 1); - - for (i, query_byte) in search_query.bytes().enumerate() { - for (j, buffer_byte) in buffer.bytes_in_range(0..buffer.len()).flatten().enumerate() { - let match_score = if query_byte == *buffer_byte { - EQUALITY_SCORE - } else { - REPLACEMENT_SCORE - }; - let up = matrix.get(i + 1, j) + DELETION_SCORE; - let left = matrix.get(i, j + 1) + INSERTION_SCORE; - let diagonal = matrix.get(i, j) + match_score; - let score = up.max(left.max(diagonal)).max(0.); - matrix.set(i + 1, j + 1, score); - } - } - - // Traceback to find the best match - let mut best_buffer_end = buffer_len; - let mut best_score = 0.0; - for col in 1..=buffer_len { - let score = matrix.get(query_len, col); - if score > best_score { - best_score = score; - best_buffer_end = col; - } - } - - let mut query_ix = query_len; - let mut buffer_ix = best_buffer_end; - while query_ix > 0 && buffer_ix > 0 { - let current = matrix.get(query_ix, buffer_ix); - let up = matrix.get(query_ix - 1, buffer_ix); - let left = matrix.get(query_ix, buffer_ix - 1); - if current == left + INSERTION_SCORE { - buffer_ix -= 1; - } else if current == up + DELETION_SCORE { - query_ix -= 1; - } else { - query_ix -= 1; - buffer_ix -= 1; - } - } - - let mut start = buffer.offset_to_point(buffer.clip_offset(buffer_ix, Bias::Left)); - start.column = 0; - let mut end = buffer.offset_to_point(buffer.clip_offset(best_buffer_end, Bias::Right)); - end.column = buffer.line_len(end.row); - - buffer.anchor_after(start)..buffer.anchor_before(end) - } -} - -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] -#[serde(tag = "operation")] -pub enum WorkflowStepEditKind { - /// Rewrites the specified text entirely based on the given description. 
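For readers tracking the `workflow.rs` removal: the `resolve_location` helper deleted above is essentially a Smith-Waterman-style local alignment between the search string and the buffer bytes, scored with the constants shown and then traced back to find where the best match begins. Below is a compact standalone restatement of that scoring and traceback over a plain `&str`, without the line-boundary snapping and anchor conversion the original performs; it is a sketch of the deleted logic, not a drop-in replacement.

```rust
use std::ops::Range;

/// Find the best fuzzy match of `query` inside `text`, using the same scoring
/// constants as the deleted `resolve_location`.
fn resolve_location(text: &str, query: &str) -> Range<usize> {
    const INSERTION_SCORE: f64 = -1.0;
    const DELETION_SCORE: f64 = -1.0;
    const REPLACEMENT_SCORE: f64 = -1.0;
    const EQUALITY_SCORE: f64 = 5.0;

    let cols = text.len() + 1;
    // Row-major score matrix, zero-initialized (local alignment).
    let mut matrix = vec![0.0_f64; (query.len() + 1) * cols];
    let at = |m: &Vec<f64>, i: usize, j: usize| m[i * cols + j];

    for (i, q) in query.bytes().enumerate() {
        for (j, t) in text.bytes().enumerate() {
            let match_score = if q == t { EQUALITY_SCORE } else { REPLACEMENT_SCORE };
            let up = at(&matrix, i + 1, j) + DELETION_SCORE;
            let left = at(&matrix, i, j + 1) + INSERTION_SCORE;
            let diagonal = at(&matrix, i, j) + match_score;
            matrix[(i + 1) * cols + (j + 1)] = up.max(left).max(diagonal).max(0.0);
        }
    }

    // The best alignment ends wherever the last row scores highest.
    let mut best_end = text.len();
    let mut best_score = 0.0;
    for j in 1..=text.len() {
        let score = at(&matrix, query.len(), j);
        if score > best_score {
            best_score = score;
            best_end = j;
        }
    }

    // Trace back through the matrix to recover where that alignment started.
    let (mut i, mut j) = (query.len(), best_end);
    while i > 0 && j > 0 {
        let current = at(&matrix, i, j);
        if current == at(&matrix, i, j - 1) + INSERTION_SCORE {
            j -= 1;
        } else if current == at(&matrix, i - 1, j) + DELETION_SCORE {
            i -= 1;
        } else {
            i -= 1;
            j -= 1;
        }
    }
    j..best_end
}

fn main() {
    let text = "fn foo1(a: usize) -> usize {\n    42\n}\n";
    let range = resolve_location(text, "fn foo1(b: usize)");
    assert!(text[range].contains("fn foo1"));
}
```

The deleted original additionally snaps the traced-back range to line boundaries and converts it to buffer anchors before returning.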
- /// This operation completely replaces the given text. - Update { - /// A string in the source text to apply the update to. - search: String, - /// A brief description of the transformation to apply to the symbol. - description: String, - }, - /// Creates a new file with the given path based on the provided description. - /// This operation adds a new file to the codebase. - Create { - /// A brief description of the file to be created. - description: String, - }, - /// Inserts text before the specified text in the source file. - InsertBefore { - /// A string in the source text to insert text before. - search: String, - /// A brief description of how the new text should be generated. - description: String, - }, - /// Inserts text after the specified text in the source file. - InsertAfter { - /// A string in the source text to insert text after. - search: String, - /// A brief description of how the new text should be generated. - description: String, - }, - /// Deletes the specified symbol from the containing file. - Delete { - /// A string in the source text to delete. - search: String, - }, -} - -#[cfg(test)] -mod tests { - use super::*; - use gpui::{AppContext, Context}; - use text::{OffsetRangeExt, Point}; - - #[gpui::test] - fn test_resolve_location(cx: &mut AppContext) { - { - let buffer = cx.new_model(|cx| { - Buffer::local( - concat!( - " Lorem\n", - " ipsum\n", - " dolor sit amet\n", - " consecteur", - ), - cx, - ) - }); - let snapshot = buffer.read(cx).snapshot(); - assert_eq!( - WorkflowStepEdit::resolve_location(&snapshot, "ipsum\ndolor").to_point(&snapshot), - Point::new(1, 0)..Point::new(2, 18) - ); - } - - { - let buffer = cx.new_model(|cx| { - Buffer::local( - concat!( - "fn foo1(a: usize) -> usize {\n", - " 42\n", - "}\n", - "\n", - "fn foo2(b: usize) -> usize {\n", - " 42\n", - "}\n", - ), - cx, - ) - }); - let snapshot = buffer.read(cx).snapshot(); - assert_eq!( - WorkflowStepEdit::resolve_location(&snapshot, "fn foo1(b: usize) {\n42\n}") - .to_point(&snapshot), - Point::new(0, 0)..Point::new(2, 1) - ); - } - - { - let buffer = cx.new_model(|cx| { - Buffer::local( - concat!( - "fn main() {\n", - " Foo\n", - " .bar()\n", - " .baz()\n", - " .qux()\n", - "}\n", - "\n", - "fn foo2(b: usize) -> usize {\n", - " 42\n", - "}\n", - ), - cx, - ) - }); - let snapshot = buffer.read(cx).snapshot(); - assert_eq!( - WorkflowStepEdit::resolve_location(&snapshot, "Foo.bar.baz.qux()") - .to_point(&snapshot), - Point::new(1, 0)..Point::new(4, 14) - ); - } - } -} diff --git a/crates/assistant_slash_command/Cargo.toml b/crates/assistant_slash_command/Cargo.toml index 3d764bb0be9d82..a58a84312fc3e2 100644 --- a/crates/assistant_slash_command/Cargo.toml +++ b/crates/assistant_slash_command/Cargo.toml @@ -19,4 +19,5 @@ gpui.workspace = true language.workspace = true parking_lot.workspace = true serde.workspace = true +serde_json.workspace = true workspace.workspace = true diff --git a/crates/assistant_slash_command/src/assistant_slash_command.rs b/crates/assistant_slash_command/src/assistant_slash_command.rs index c5dece11ca59a8..36e229d49a246d 100644 --- a/crates/assistant_slash_command/src/assistant_slash_command.rs +++ b/crates/assistant_slash_command/src/assistant_slash_command.rs @@ -2,7 +2,7 @@ mod slash_command_registry; use anyhow::Result; use gpui::{AnyElement, AppContext, ElementId, SharedString, Task, WeakView, WindowContext}; -use language::{CodeLabel, LspAdapterDelegate}; +use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate, OffsetRangeExt}; use serde::{Deserialize, 
Serialize}; pub use slash_command_registry::*; use std::{ @@ -77,6 +77,8 @@ pub trait SlashCommand: 'static + Send + Sync { fn run( self: Arc, arguments: &[String], + context_slash_command_output_sections: &[SlashCommandOutputSection], + context_buffer: BufferSnapshot, workspace: WeakView, // TODO: We're just using the `LspAdapterDelegate` here because that is // what the extension API is already expecting. @@ -94,7 +96,7 @@ pub type RenderFoldPlaceholder = Arc< + Fn(ElementId, Arc, &mut WindowContext) -> AnyElement, >; -#[derive(Debug, Default)] +#[derive(Debug, Default, PartialEq)] pub struct SlashCommandOutput { pub text: String, pub sections: Vec>, @@ -106,4 +108,11 @@ pub struct SlashCommandOutputSection { pub range: Range, pub icon: IconName, pub label: SharedString, + pub metadata: Option, +} + +impl SlashCommandOutputSection { + pub fn is_valid(&self, buffer: &language::TextBuffer) -> bool { + self.range.start.is_valid(buffer) && !self.range.to_offset(buffer).is_empty() + } } diff --git a/crates/audio/Cargo.toml b/crates/audio/Cargo.toml index bfe22de1f07482..9502b58f93274e 100644 --- a/crates/audio/Cargo.toml +++ b/crates/audio/Cargo.toml @@ -18,5 +18,5 @@ collections.workspace = true derive_more.workspace = true gpui.workspace = true parking_lot.workspace = true -rodio = { version = "0.17.1", default-features = false, features = ["wav"] } +rodio = { version = "0.19.0", default-features = false, features = ["wav"] } util.workspace = true diff --git a/crates/auto_update/Cargo.toml b/crates/auto_update/Cargo.toml index 12e669780d83d7..1e08c9a768b0d5 100644 --- a/crates/auto_update/Cargo.toml +++ b/crates/auto_update/Cargo.toml @@ -19,7 +19,6 @@ db.workspace = true editor.workspace = true gpui.workspace = true http_client.workspace = true -isahc.workspace = true log.workspace = true markdown_preview.workspace = true menu.workspace = true diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 8063ff4c40fca3..d501e6d93fdc11 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -9,7 +9,6 @@ use gpui::{ actions, AppContext, AsyncAppContext, Context as _, Global, Model, ModelContext, SemanticVersion, SharedString, Task, View, ViewContext, VisualContext, WindowContext, }; -use isahc::AsyncBody; use markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPreviewView}; use schemars::JsonSchema; @@ -20,7 +19,7 @@ use smol::{fs, io::AsyncReadExt}; use settings::{Settings, SettingsSources, SettingsStore}; use smol::{fs::File, process::Command}; -use http_client::{HttpClient, HttpClientWithUrl}; +use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; use release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; use std::{ env::{ @@ -131,7 +130,7 @@ impl Settings for AutoUpdateSetting { type FileContent = Option; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { - let auto_update = [sources.release_channel, sources.user] + let auto_update = [sources.server, sources.release_channel, sources.user] .into_iter() .find_map(|value| value.copied().flatten()) .unwrap_or(sources.default.ok_or_else(Self::missing_default)?); @@ -244,29 +243,44 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) -> Option<( let auto_updater = AutoUpdater::get(cx)?; let release_channel = ReleaseChannel::try_global(cx)?; - if matches!( - release_channel, - ReleaseChannel::Stable | ReleaseChannel::Preview - ) { - let auto_updater = auto_updater.read(cx); - let release_channel = 
release_channel.dev_name(); - let current_version = auto_updater.current_version; - let url = &auto_updater - .http_client - .build_url(&format!("/releases/{release_channel}/{current_version}")); - cx.open_url(url); + match release_channel { + ReleaseChannel::Stable | ReleaseChannel::Preview => { + let auto_updater = auto_updater.read(cx); + let current_version = auto_updater.current_version; + let release_channel = release_channel.dev_name(); + let path = format!("/releases/{release_channel}/{current_version}"); + let url = &auto_updater.http_client.build_url(&path); + cx.open_url(url); + } + ReleaseChannel::Nightly => { + cx.open_url("https://github.com/zed-industries/zed/commits/nightly/"); + } + ReleaseChannel::Dev => { + cx.open_url("https://github.com/zed-industries/zed/commits/main/"); + } } - None } fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext) { let release_channel = ReleaseChannel::global(cx); + + let url = match release_channel { + ReleaseChannel::Nightly => Some("https://github.com/zed-industries/zed/commits/nightly/"), + ReleaseChannel::Dev => Some("https://github.com/zed-industries/zed/commits/main/"), + _ => None, + }; + + if let Some(url) = url { + cx.open_url(url); + return; + } + let version = AppVersion::global(cx).to_string(); let client = client::Client::global(cx).http_client(); let url = client.build_url(&format!( - "/api/release_notes/{}/{}", + "/api/release_notes/v2/{}/{}", release_channel.dev_name(), version )); @@ -343,15 +357,17 @@ pub fn notify_of_any_new_update(cx: &mut ViewContext) -> Option<()> { let should_show_notification = should_show_notification.await?; if should_show_notification { workspace.update(&mut cx, |workspace, cx| { + let workspace_handle = workspace.weak_handle(); workspace.show_notification( NotificationId::unique::(), cx, - |cx| cx.new_view(|_| UpdateNotification::new(version)), + |cx| cx.new_view(|_| UpdateNotification::new(version, workspace_handle)), ); - updater - .read(cx) - .set_should_show_update_notification(false, cx) - .detach_and_log_err(cx); + updater.update(cx, |updater, cx| { + updater + .set_should_show_update_notification(false, cx) + .detach_and_log_err(cx); + }); })?; } anyhow::Ok(()) @@ -448,6 +464,7 @@ impl AutoUpdater { smol::fs::create_dir_all(&platform_dir).await.ok(); let client = this.read_with(cx, |this, _| this.http_client.clone())?; + if smol::fs::metadata(&version_path).await.is_err() { log::info!("downloading zed-remote-server {os} {arch}"); download_remote_server_binary(&version_path, release, client, cx).await?; diff --git a/crates/auto_update/src/update_notification.rs b/crates/auto_update/src/update_notification.rs index 66028c24011995..7568a0eb1a94e7 100644 --- a/crates/auto_update/src/update_notification.rs +++ b/crates/auto_update/src/update_notification.rs @@ -1,13 +1,18 @@ use gpui::{ div, DismissEvent, EventEmitter, InteractiveElement, IntoElement, ParentElement, Render, - SemanticVersion, StatefulInteractiveElement, Styled, ViewContext, + SemanticVersion, StatefulInteractiveElement, Styled, ViewContext, WeakView, }; use menu::Cancel; use release_channel::ReleaseChannel; -use workspace::ui::{h_flex, v_flex, Icon, IconName, Label, StyledExt}; +use util::ResultExt; +use workspace::{ + ui::{h_flex, v_flex, Icon, IconName, Label, StyledExt}, + Workspace, +}; pub struct UpdateNotification { version: SemanticVersion, + workspace: WeakView, } impl EventEmitter for UpdateNotification {} @@ -41,7 +46,11 @@ impl Render for UpdateNotification { .child(Label::new("View the 
release notes")) .cursor_pointer() .on_click(cx.listener(|this, _, cx| { - crate::view_release_notes(&Default::default(), cx); + this.workspace + .update(cx, |workspace, cx| { + crate::view_release_notes_locally(workspace, cx); + }) + .log_err(); this.dismiss(&menu::Cancel, cx) })), ) @@ -49,8 +58,8 @@ impl Render for UpdateNotification { } impl UpdateNotification { - pub fn new(version: SemanticVersion) -> Self { - Self { version } + pub fn new(version: SemanticVersion, workspace: WeakView) -> Self { + Self { version, workspace } } pub fn dismiss(&mut self, _: &Cancel, cx: &mut ViewContext) { diff --git a/crates/breadcrumbs/src/breadcrumbs.rs b/crates/breadcrumbs/src/breadcrumbs.rs index 93ebfa06435843..09b29c0436f6ed 100644 --- a/crates/breadcrumbs/src/breadcrumbs.rs +++ b/crates/breadcrumbs/src/breadcrumbs.rs @@ -1,7 +1,7 @@ use editor::Editor; use gpui::{ - Element, EventEmitter, IntoElement, ParentElement, Render, StyledText, Subscription, - ViewContext, + Element, EventEmitter, FocusableView, IntoElement, ParentElement, Render, StyledText, + Subscription, ViewContext, }; use itertools::Itertools; use std::cmp; @@ -90,17 +90,30 @@ impl Render for Breadcrumbs { ButtonLike::new("toggle outline view") .child(breadcrumbs_stack) .style(ButtonStyle::Transparent) - .on_click(move |_, cx| { - if let Some(editor) = editor.upgrade() { - outline::toggle(editor, &editor::actions::ToggleOutline, cx) + .on_click({ + let editor = editor.clone(); + move |_, cx| { + if let Some(editor) = editor.upgrade() { + outline::toggle(editor, &editor::actions::ToggleOutline, cx) + } } }) - .tooltip(|cx| { - Tooltip::for_action( - "Show symbol outline", - &editor::actions::ToggleOutline, - cx, - ) + .tooltip(move |cx| { + if let Some(editor) = editor.upgrade() { + let focus_handle = editor.read(cx).focus_handle(cx); + Tooltip::for_action_in( + "Show symbol outline", + &editor::actions::ToggleOutline, + &focus_handle, + cx, + ) + } else { + Tooltip::for_action( + "Show symbol outline", + &editor::actions::ToggleOutline, + cx, + ) + } }), ), None => element diff --git a/crates/call/src/room.rs b/crates/call/src/room.rs index 12516685293c88..a637bfd43fdf65 100644 --- a/crates/call/src/room.rs +++ b/crates/call/src/room.rs @@ -1178,7 +1178,7 @@ impl Room { this.update(&mut cx, |this, cx| { this.joined_projects.retain(|project| { if let Some(project) = project.upgrade() { - !project.read(cx).is_disconnected() + !project.read(cx).is_disconnected(cx) } else { false } @@ -1200,6 +1200,7 @@ impl Room { room_id: self.id(), worktrees: vec![], dev_server_project_id: Some(dev_server_project_id.0), + is_ssh_project: false, }) } else { if let Some(project_id) = project.read(cx).remote_id() { @@ -1210,6 +1211,7 @@ impl Room { room_id: self.id(), worktrees: project.read(cx).worktree_metadata_protos(cx), dev_server_project_id: None, + is_ssh_project: project.read(cx).is_via_ssh(), }) }; diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index bf32185b22043e..0a4a259648bb74 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -5,8 +5,8 @@ use collections::HashMap; use gpui::{AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task}; use language::proto::serialize_version; use rpc::{ - proto::{self, AnyProtoClient, PeerId}, - TypedEnvelope, + proto::{self, PeerId}, + AnyProtoClient, TypedEnvelope, }; use std::{sync::Arc, time::Duration}; use text::BufferId; @@ -66,7 +66,7 @@ impl ChannelBuffer { let capability = 
channel_store.read(cx).channel_capability(channel.id); language::Buffer::remote(buffer_id, response.replica_id as u16, capability, base_text) })?; - buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))??; + buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?; let subscription = client.subscribe_to_entity(channel.id.0)?; @@ -151,7 +151,7 @@ impl ChannelBuffer { cx.notify(); this.buffer .update(cx, |buffer, cx| buffer.apply_ops(ops, cx)) - })??; + })?; Ok(()) } @@ -171,11 +171,14 @@ impl ChannelBuffer { fn on_buffer_update( &mut self, _: Model, - event: &language::Event, + event: &language::BufferEvent, cx: &mut ModelContext, ) { match event { - language::Event::Operation(operation) => { + language::BufferEvent::Operation { + operation, + is_local: true, + } => { if *ZED_ALWAYS_ACTIVE { if let language::Operation::UpdateSelections { selections, .. } = operation { if selections.is_empty() { @@ -191,7 +194,7 @@ impl ChannelBuffer { }) .log_err(); } - language::Event::Edited => { + language::BufferEvent::Edited => { cx.emit(ChannelBufferEvent::BufferEdited); } _ => {} diff --git a/crates/channel/src/channel_chat.rs b/crates/channel/src/channel_chat.rs index 7cbc362ff3500b..e5b5b74c16262b 100644 --- a/crates/channel/src/channel_chat.rs +++ b/crates/channel/src/channel_chat.rs @@ -11,7 +11,7 @@ use gpui::{ AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task, WeakModel, }; use rand::prelude::*; -use rpc::proto::AnyProtoClient; +use rpc::AnyProtoClient; use std::{ ops::{ControlFlow, Range}, sync::Arc, @@ -332,7 +332,7 @@ impl ChannelChat { .update(&mut cx, |chat, cx| { if let Some(first_id) = chat.first_loaded_message_id() { if first_id <= message_id { - let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>(); + let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>(&()); let message_id = ChannelMessageId::Saved(message_id); cursor.seek(&message_id, Bias::Left, &()); return ControlFlow::Break( @@ -498,7 +498,7 @@ impl ChannelChat { } pub fn message(&self, ix: usize) -> &ChannelMessage { - let mut cursor = self.messages.cursor::(); + let mut cursor = self.messages.cursor::(&()); cursor.seek(&Count(ix), Bias::Right, &()); cursor.item().unwrap() } @@ -515,13 +515,13 @@ impl ChannelChat { } pub fn messages_in_range(&self, range: Range) -> impl Iterator { - let mut cursor = self.messages.cursor::(); + let mut cursor = self.messages.cursor::(&()); cursor.seek(&Count(range.start), Bias::Right, &()); cursor.take(range.len()) } pub fn pending_messages(&self) -> impl Iterator { - let mut cursor = self.messages.cursor::(); + let mut cursor = self.messages.cursor::(&()); cursor.seek(&ChannelMessageId::Pending(0), Bias::Left, &()); cursor } @@ -589,11 +589,11 @@ impl ChannelChat { fn insert_messages(&mut self, messages: SumTree, cx: &mut ModelContext) { if let Some((first_message, last_message)) = messages.first().zip(messages.last()) { let nonces = messages - .cursor::<()>() + .cursor::<()>(&()) .map(|m| m.nonce) .collect::>(); - let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>(); + let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>(&()); let mut new_messages = old_cursor.slice(&first_message.id, Bias::Left, &()); let start_ix = old_cursor.start().1 .0; let removed_messages = old_cursor.slice(&last_message.id, Bias::Right, &()); @@ -646,7 +646,7 @@ impl ChannelChat { } fn message_removed(&mut self, id: u64, cx: &mut ModelContext) { - let mut cursor = self.messages.cursor::(); + let 
mut cursor = self.messages.cursor::(&()); let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left, &()); if let Some(item) = cursor.item() { if item.id == ChannelMessageId::Saved(id) { @@ -685,7 +685,7 @@ impl ChannelChat { edited_at: Option, cx: &mut ModelContext, ) { - let mut cursor = self.messages.cursor::(); + let mut cursor = self.messages.cursor::(&()); let mut messages = cursor.slice(&id, Bias::Left, &()); let ix = messages.summary().count; @@ -716,7 +716,7 @@ async fn messages_from_proto( cx: &mut AsyncAppContext, ) -> Result> { let messages = ChannelMessage::from_proto_vec(proto_messages, user_store, cx).await?; - let mut result = SumTree::new(); + let mut result = SumTree::default(); result.extend(messages, &()); Ok(result) } @@ -808,7 +808,7 @@ pub fn mentions_to_proto(mentions: &[(Range, UserId)]) -> Vec Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { ChannelMessageSummary { max_id: self.id, count: 1, @@ -825,6 +825,10 @@ impl Default for ChannelMessageId { impl sum_tree::Summary for ChannelMessageSummary { type Context = (); + fn zero(_cx: &Self::Context) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { self.max_id = summary.max_id; self.count += summary.count; @@ -832,6 +836,10 @@ impl sum_tree::Summary for ChannelMessageSummary { } impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for ChannelMessageId { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ChannelMessageSummary, _: &()) { debug_assert!(summary.max_id > *self); *self = summary.max_id; @@ -839,6 +847,10 @@ impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for ChannelMessageId { } impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for Count { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ChannelMessageSummary, _: &()) { self.0 += summary.count; } diff --git a/crates/channel/src/channel_store.rs b/crates/channel/src/channel_store.rs index 9bd5fd564f29dc..fc5b12cfae1c39 100644 --- a/crates/channel/src/channel_store.rs +++ b/crates/channel/src/channel_store.rs @@ -1007,7 +1007,7 @@ impl ChannelStore { .into_iter() .map(language::proto::deserialize_operation) .collect::>>()?; - buffer.apply_ops(incoming_operations, cx)?; + buffer.apply_ops(incoming_operations, cx); anyhow::Ok(outgoing_operations) }) .log_err(); diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index a09deaaf943823..e69183d1ea98f7 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -58,27 +58,32 @@ struct Args { dev_server_token: Option, } -fn parse_path_with_position(argument_str: &str) -> Result { - let path = PathWithPosition::parse_str(argument_str); - let curdir = env::current_dir()?; - - let canonicalized = path.map_path(|path| match fs::canonicalize(&path) { - Ok(path) => Ok(path), - Err(e) => { - if let Some(mut parent) = path.parent() { - if parent == Path::new("") { - parent = &curdir - } - match fs::canonicalize(parent) { - Ok(parent) => Ok(parent.join(path.file_name().unwrap())), - Err(_) => Err(e), +fn parse_path_with_position(argument_str: &str) -> anyhow::Result { + let canonicalized = match Path::new(argument_str).canonicalize() { + Ok(existing_path) => PathWithPosition::from_path(existing_path), + Err(_) => { + let path = PathWithPosition::parse_str(argument_str); + let curdir = env::current_dir().context("reteiving current directory")?; + path.map_path(|path| match fs::canonicalize(&path) { + Ok(path) => Ok(path), + Err(e) 
=> { + if let Some(mut parent) = path.parent() { + if parent == Path::new("") { + parent = &curdir + } + match fs::canonicalize(parent) { + Ok(parent) => Ok(parent.join(path.file_name().unwrap())), + Err(_) => Err(e), + } + } else { + Err(e) + } } - } else { - Err(e) - } + }) } - })?; - Ok(canonicalized.to_string(|path| path.display().to_string())) + .with_context(|| format!("parsing as path with position {argument_str}"))?, + }; + Ok(canonicalized.to_string(|path| path.to_string_lossy().to_string())) } fn main() -> Result<()> { diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index 72ca8ffc2478fd..80e9a17ba8e421 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -18,12 +18,11 @@ test-support = ["clock/test-support", "collections/test-support", "gpui/test-sup [dependencies] anyhow.workspace = true async-recursion = "0.3" -async-tungstenite = { workspace = true, features = ["async-std", "async-native-tls"] } +async-tungstenite = { workspace = true, features = ["async-std", "async-tls"] } chrono = { workspace = true, features = ["serde"] } clock.workspace = true collections.workspace = true feature_flags.workspace = true -fs.workspace = true futures.workspace = true gpui.workspace = true http_client.workspace = true @@ -34,7 +33,9 @@ parking_lot.workspace = true postage.workspace = true rand.workspace = true release_channel.workspace = true -rpc.workspace = true +rpc = { workspace = true, features = ["gpui"] } +rustls-native-certs.workspace = true +rustls.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 8787e2ed9675fe..4a42554ebfda8c 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -4,6 +4,7 @@ pub mod test; mod socks; pub mod telemetry; pub mod user; +pub mod zed_urls; use anyhow::{anyhow, bail, Context as _, Result}; use async_recursion::async_recursion; @@ -22,7 +23,6 @@ use gpui::{actions, AppContext, AsyncAppContext, Global, Model, Task, WeakModel} use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; use parking_lot::RwLock; use postage::watch; -use proto::{AnyProtoClient, EntityMessageSubscriber, ProtoClient, ProtoMessageHandlerSet}; use rand::prelude::*; use release_channel::{AppVersion, ReleaseChannel}; use rpc::proto::{AnyTypedEnvelope, EnvelopedMessage, PeerId, RequestMessage}; @@ -142,6 +142,7 @@ impl Settings for ProxySettings { Ok(Self { proxy: sources .user + .or(sources.server) .and_then(|value| value.proxy.clone()) .or(sources.default.proxy.clone()), }) @@ -241,8 +242,6 @@ pub enum EstablishConnectionError { #[error("{0}")] Other(#[from] anyhow::Error), #[error("{0}")] - Http(#[from] http_client::Error), - #[error("{0}")] InvalidHeaderValue(#[from] async_tungstenite::tungstenite::http::header::InvalidHeaderValue), #[error("{0}")] Io(#[from] std::io::Error), @@ -397,7 +396,7 @@ pub struct PendingEntitySubscription { } impl PendingEntitySubscription { - pub fn set_model(mut self, model: &Model, cx: &mut AsyncAppContext) -> Subscription { + pub fn set_model(mut self, model: &Model, cx: &AsyncAppContext) -> Subscription { self.consumed = true; let mut handlers = self.client.handler_set.lock(); let id = (TypeId::of::(), self.remote_id); @@ -475,15 +474,21 @@ impl settings::Settings for TelemetrySettings { fn load(sources: SettingsSources, _: &mut AppContext) -> Result { Ok(Self { - diagnostics: sources.user.as_ref().and_then(|v| v.diagnostics).unwrap_or( - sources - .default - 
.diagnostics - .ok_or_else(Self::missing_default)?, - ), + diagnostics: sources + .user + .as_ref() + .or(sources.server.as_ref()) + .and_then(|v| v.diagnostics) + .unwrap_or( + sources + .default + .diagnostics + .ok_or_else(Self::missing_default)?, + ), metrics: sources .user .as_ref() + .or(sources.server.as_ref()) .and_then(|v| v.metrics) .unwrap_or(sources.default.metrics.ok_or_else(Self::missing_default)?), }) @@ -530,19 +535,13 @@ impl Client { } pub fn production(cx: &mut AppContext) -> Arc { - let user_agent = format!( - "Zed/{} ({}; {})", - AppVersion::global(cx), - std::env::consts::OS, - std::env::consts::ARCH - ); let clock = Arc::new(clock::RealSystemClock); - let http = Arc::new(HttpClientWithUrl::new( + let http = Arc::new(HttpClientWithUrl::new_uri( + cx.http_client(), &ClientSettings::get_global(cx).server_url, - Some(user_agent), - ProxySettings::get_global(cx).proxy.clone(), + cx.http_client().proxy().cloned(), )); - Self::new(clock, http.clone(), cx) + Self::new(clock, http, cx) } pub fn id(&self) -> u64 { @@ -1032,7 +1031,7 @@ impl Client { &self, http: Arc, release_channel: Option, - ) -> impl Future> { + ) -> impl Future> { #[cfg(any(test, feature = "test-support"))] let url_override = self.rpc_url.read().clone(); @@ -1126,7 +1125,7 @@ impl Client { // for us from the RPC URL. // // Among other things, it will generate and set a `Sec-WebSocket-Key` header for us. - let mut request = rpc_url.into_client_request()?; + let mut request = IntoClientRequest::into_client_request(rpc_url.as_str())?; // We then modify the request to add our desired headers. let request_headers = request.headers_mut(); @@ -1146,8 +1145,33 @@ impl Client { match url_scheme { Https => { + let client_config = { + let mut root_store = rustls::RootCertStore::empty(); + + let root_certs = rustls_native_certs::load_native_certs(); + for error in root_certs.errors { + log::warn!("error loading native certs: {:?}", error); + } + root_store.add_parsable_certificates( + &root_certs + .certs + .into_iter() + .map(|cert| cert.as_ref().to_owned()) + .collect::>(), + ); + rustls::ClientConfig::builder() + .with_safe_defaults() + .with_root_certificates(root_store) + .with_no_client_auth() + }; + let (stream, _) = - async_tungstenite::async_std::client_async_tls(request, stream).await?; + async_tungstenite::async_tls::client_async_tls_with_connector( + request, + stream, + Some(client_config.into()), + ) + .await?; Ok(Connection::new( stream .map_err(|error| anyhow!(error)) @@ -1606,6 +1630,10 @@ impl ProtoClient for Client { fn message_handler_set(&self) -> &parking_lot::Mutex { &self.handler_set } + + fn is_via_collab(&self) -> bool { + true + } } #[derive(Serialize, Deserialize)] @@ -1733,7 +1761,7 @@ impl CredentialsProvider for KeychainCredentialsProvider { } /// prefix for the zed:// url scheme -pub static ZED_URL_SCHEME: &str = "zed"; +pub const ZED_URL_SCHEME: &str = "zed"; /// Parses the given link into a Zed link. 
/// diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 860288038bd9e2..ba03255d54680b 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -1,12 +1,13 @@ mod event_coalescer; use crate::{ChannelId, TelemetrySettings}; +use anyhow::Result; use chrono::{DateTime, Utc}; use clock::SystemClock; use collections::{HashMap, HashSet}; use futures::Future; use gpui::{AppContext, BackgroundExecutor, Task}; -use http_client::{self, HttpClient, HttpClientWithUrl, Method}; +use http_client::{self, AsyncBody, HttpClient, HttpClientWithUrl, Method, Request}; use once_cell::sync::Lazy; use parking_lot::Mutex; use release_channel::ReleaseChannel; @@ -16,9 +17,9 @@ use std::io::Write; use std::{env, mem, path::PathBuf, sync::Arc, time::Duration}; use sysinfo::{CpuRefreshKind, Pid, ProcessRefreshKind, RefreshKind, System}; use telemetry_events::{ - ActionEvent, AppEvent, AssistantEvent, AssistantKind, CallEvent, CpuEvent, EditEvent, - EditorEvent, Event, EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, - MemoryEvent, ReplEvent, SettingEvent, + ActionEvent, AppEvent, AssistantEvent, CallEvent, CpuEvent, EditEvent, EditorEvent, Event, + EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, ReplEvent, + SettingEvent, }; use tempfile::NamedTempFile; #[cfg(not(debug_assertions))] @@ -37,9 +38,10 @@ pub struct Telemetry { struct TelemetryState { settings: TelemetrySettings, - metrics_id: Option>, // Per logged-in user + system_id: Option>, // Per system installation_id: Option>, // Per app installation (different for dev, nightly, preview, and stable) session_id: Option, // Per app launch + metrics_id: Option>, // Per logged-in user release_channel: Option<&'static str>, architecture: &'static str, events_queue: Vec, @@ -191,9 +193,10 @@ impl Telemetry { settings: *TelemetrySettings::get_global(cx), architecture: env::consts::ARCH, release_channel, + system_id: None, installation_id: None, - metrics_id: None, session_id: None, + metrics_id: None, events_queue: Vec::new(), flush_events_task: None, log_file: None, @@ -283,11 +286,13 @@ impl Telemetry { pub fn start( self: &Arc, + system_id: Option, installation_id: Option, session_id: String, - cx: &mut AppContext, + cx: &AppContext, ) { let mut state = self.state.lock(); + state.system_id = system_id.map(|id| id.into()); state.installation_id = installation_id.map(|id| id.into()); state.session_id = Some(session_id); state.app_version = release_channel::AppVersion::global(cx).to_string(); @@ -304,7 +309,10 @@ impl Telemetry { let refresh_kind = ProcessRefreshKind::new().with_cpu().with_memory(); let current_process = Pid::from_u32(std::process::id()); - system.refresh_process_specifics(current_process, refresh_kind); + system.refresh_processes_specifics( + sysinfo::ProcessesToUpdate::Some(&[current_process]), + refresh_kind, + ); // Waiting some amount of time before the first query is important to get a reasonable value // https://docs.rs/sysinfo/0.29.10/sysinfo/trait.ProcessExt.html#tymethod.cpu_usage @@ -314,7 +322,10 @@ impl Telemetry { smol::Timer::after(DURATION_BETWEEN_SYSTEM_EVENTS).await; let current_process = Pid::from_u32(std::process::id()); - system.refresh_process_specifics(current_process, refresh_kind); + system.refresh_processes_specifics( + sysinfo::ProcessesToUpdate::Some(&[current_process]), + refresh_kind, + ); let Some(process) = system.process(current_process) else { log::error!( "Failed to find own process {current_process:?} in 
system process table" @@ -354,6 +365,7 @@ impl Telemetry { operation: &'static str, copilot_enabled: bool, copilot_enabled_for_language: bool, + is_via_ssh: bool, ) { let event = Event::Editor(EditorEvent { file_extension, @@ -361,6 +373,7 @@ impl Telemetry { operation: operation.into(), copilot_enabled, copilot_enabled_for_language, + is_via_ssh, }); self.report_event(event) @@ -381,23 +394,8 @@ impl Telemetry { self.report_event(event) } - pub fn report_assistant_event( - self: &Arc, - conversation_id: Option, - kind: AssistantKind, - model: String, - response_latency: Option, - error_message: Option, - ) { - let event = Event::Assistant(AssistantEvent { - conversation_id, - kind, - model: model.to_string(), - response_latency, - error_message, - }); - - self.report_event(event) + pub fn report_assistant_event(self: &Arc, event: AssistantEvent) { + self.report_event(Event::Assistant(event)); } pub fn report_call_event( @@ -461,7 +459,7 @@ impl Telemetry { })) } - pub fn log_edit_event(self: &Arc, environment: &'static str) { + pub fn log_edit_event(self: &Arc, environment: &'static str, is_via_ssh: bool) { let mut state = self.state.lock(); let period_data = state.event_coalescer.log_event(environment); drop(state); @@ -470,6 +468,7 @@ impl Telemetry { let event = Event::Edit(EditEvent { duration: end.timestamp_millis() - start.timestamp_millis(), environment: environment.to_string(), + is_via_ssh, }); self.report_event(event); @@ -490,7 +489,7 @@ impl Telemetry { worktree_id: WorktreeId, updated_entries_set: &UpdatedEntriesSet, ) { - let project_names: Vec = { + let project_type_names: Vec = { let mut state = self.state.lock(); state .worktree_id_map @@ -526,8 +525,8 @@ impl Telemetry { }; // Done on purpose to avoid calling `self.state.lock()` multiple times - for project_name in project_names { - self.report_app_event(format!("open {} project", project_name)); + for project_type_name in project_type_names { + self.report_app_event(format!("open {} project", project_type_name)); } } @@ -599,6 +598,29 @@ impl Telemetry { self.state.lock().is_staff } + fn build_request( + self: &Arc, + // We take in the JSON bytes buffer so we can reuse the existing allocation. + mut json_bytes: Vec, + event_request: EventRequestBody, + ) -> Result> { + json_bytes.clear(); + serde_json::to_writer(&mut json_bytes, &event_request)?; + + let checksum = calculate_json_checksum(&json_bytes).unwrap_or("".to_string()); + + Ok(Request::builder() + .method(Method::POST) + .uri( + self.http_client + .build_zed_api_url("/telemetry/events", &[])? + .as_ref(), + ) + .header("Content-Type", "application/json") + .header("x-zed-checksum", checksum) + .body(json_bytes.into())?) 
+ } + pub fn flush_events(self: &Arc) { let mut state = self.state.lock(); state.first_event_date_time = None; @@ -625,13 +647,14 @@ impl Telemetry { } } - { + let request_body = { let state = this.state.lock(); - let request_body = EventRequestBody { + EventRequestBody { + system_id: state.system_id.as_deref().map(Into::into), installation_id: state.installation_id.as_deref().map(Into::into), - metrics_id: state.metrics_id.as_deref().map(Into::into), session_id: state.session_id.clone(), + metrics_id: state.metrics_id.as_deref().map(Into::into), is_staff: state.is_staff, app_version: state.app_version.clone(), os_name: state.os_name.clone(), @@ -640,25 +663,11 @@ impl Telemetry { release_channel: state.release_channel.map(Into::into), events, - }; - json_bytes.clear(); - serde_json::to_writer(&mut json_bytes, &request_body)?; - } - - let checksum = calculate_json_checksum(&json_bytes).unwrap_or("".to_string()); - - let request = http_client::Request::builder() - .method(Method::POST) - .uri( - this.http_client - .build_zed_api_url("/telemetry/events", &[])? - .as_ref(), - ) - .header("Content-Type", "text/plain") - .header("x-zed-checksum", checksum) - .body(json_bytes.into()); + } + }; - let response = this.http_client.send(request?).await?; + let request = this.build_request(json_bytes, request_body)?; + let response = this.http_client.send(request).await?; if response.status() != 200 { log::error!("Failed to send events: HTTP {:?}", response.status()); } @@ -703,6 +712,7 @@ mod tests { Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(), )); let http = FakeHttpClient::with_200_response(); + let system_id = Some("system_id".to_string()); let installation_id = Some("installation_id".to_string()); let session_id = "session_id".to_string(); @@ -710,7 +720,7 @@ mod tests { let telemetry = Telemetry::new(clock.clone(), http, cx); telemetry.state.lock().max_queue_size = 4; - telemetry.start(installation_id, session_id, cx); + telemetry.start(system_id, installation_id, session_id, cx); assert!(is_empty_state(&telemetry)); @@ -788,13 +798,14 @@ mod tests { Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(), )); let http = FakeHttpClient::with_200_response(); + let system_id = Some("system_id".to_string()); let installation_id = Some("installation_id".to_string()); let session_id = "session_id".to_string(); cx.update(|cx| { let telemetry = Telemetry::new(clock.clone(), http, cx); telemetry.state.lock().max_queue_size = 4; - telemetry.start(installation_id, session_id, cx); + telemetry.start(system_id, installation_id, session_id, cx); assert!(is_empty_state(&telemetry)); diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index 0f53b35fc1fd49..a312dd349507f7 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -138,7 +138,7 @@ enum UpdateContacts { } impl UserStore { - pub fn new(client: Arc, cx: &mut ModelContext) -> Self { + pub fn new(client: Arc, cx: &ModelContext) -> Self { let (mut current_user_tx, current_user_rx) = watch::channel(); let (update_contacts_tx, mut update_contacts_rx) = mpsc::unbounded(); let rpc_subscriptions = vec![ @@ -310,7 +310,7 @@ impl UserStore { fn update_contacts( &mut self, message: UpdateContacts, - cx: &mut ModelContext, + cx: &ModelContext, ) -> Task> { match message { UpdateContacts::Wait(barrier) => { @@ -525,9 +525,9 @@ impl UserStore { } pub fn dismiss_contact_request( - &mut self, + &self, requester_id: u64, - cx: &mut ModelContext, + cx: &ModelContext, ) -> Task> { let client = self.client.upgrade(); cx.spawn(move 
|_, _| async move { @@ -573,7 +573,7 @@ impl UserStore { }) } - pub fn clear_contacts(&mut self) -> impl Future { + pub fn clear_contacts(&self) -> impl Future { let (tx, mut rx) = postage::barrier::channel(); self.update_contacts_tx .unbounded_send(UpdateContacts::Clear(tx)) @@ -583,7 +583,7 @@ impl UserStore { } } - pub fn contact_updates_done(&mut self) -> impl Future { + pub fn contact_updates_done(&self) -> impl Future { let (tx, mut rx) = postage::barrier::channel(); self.update_contacts_tx .unbounded_send(UpdateContacts::Wait(tx)) @@ -594,9 +594,9 @@ impl UserStore { } pub fn get_users( - &mut self, + &self, user_ids: Vec, - cx: &mut ModelContext, + cx: &ModelContext, ) -> Task>>> { let mut user_ids_to_fetch = user_ids.clone(); user_ids_to_fetch.retain(|id| !self.users.contains_key(id)); @@ -629,9 +629,9 @@ impl UserStore { } pub fn fuzzy_search_users( - &mut self, + &self, query: String, - cx: &mut ModelContext, + cx: &ModelContext, ) -> Task>>> { self.load_users(proto::FuzzySearchUsers { query }, cx) } @@ -640,11 +640,7 @@ impl UserStore { self.users.get(&user_id).cloned() } - pub fn get_user_optimistic( - &mut self, - user_id: u64, - cx: &mut ModelContext, - ) -> Option> { + pub fn get_user_optimistic(&self, user_id: u64, cx: &ModelContext) -> Option> { if let Some(user) = self.users.get(&user_id).cloned() { return Some(user); } @@ -653,11 +649,7 @@ impl UserStore { None } - pub fn get_user( - &mut self, - user_id: u64, - cx: &mut ModelContext, - ) -> Task>> { + pub fn get_user(&self, user_id: u64, cx: &ModelContext) -> Task>> { if let Some(user) = self.users.get(&user_id).cloned() { return Task::ready(Ok(user)); } @@ -697,7 +689,7 @@ impl UserStore { .map(|accepted_tos_at| accepted_tos_at.is_some()) } - pub fn accept_terms_of_service(&mut self, cx: &mut ModelContext) -> Task> { + pub fn accept_terms_of_service(&self, cx: &ModelContext) -> Task> { if self.current_user().is_none() { return Task::ready(Err(anyhow!("no current user"))); }; @@ -726,9 +718,9 @@ impl UserStore { } fn load_users( - &mut self, + &self, request: impl RequestMessage, - cx: &mut ModelContext, + cx: &ModelContext, ) -> Task>>> { let client = self.client.clone(); cx.spawn(|this, mut cx| async move { diff --git a/crates/client/src/zed_urls.rs b/crates/client/src/zed_urls.rs new file mode 100644 index 00000000000000..a5b27cf288e2ee --- /dev/null +++ b/crates/client/src/zed_urls.rs @@ -0,0 +1,19 @@ +//! Contains helper functions for constructing URLs to various Zed-related pages. +//! +//! These URLs will adapt to the configured server URL in order to construct +//! links appropriate for the environment (e.g., by linking to a local copy of +//! zed.dev in development). + +use gpui::AppContext; +use settings::Settings; + +use crate::ClientSettings; + +fn server_url(cx: &AppContext) -> &str { + &ClientSettings::get_global(cx).server_url +} + +/// Returns the URL to the account page on zed.dev. +pub fn account_url(cx: &AppContext) -> String { + format!("{server_url}/account", server_url = server_url(cx)) +} diff --git a/crates/clock/src/clock.rs b/crates/clock/src/clock.rs index f7d36ed4a87b9d..acbde90dc1ad91 100644 --- a/crates/clock/src/clock.rs +++ b/crates/clock/src/clock.rs @@ -9,6 +9,8 @@ use std::{ pub use system_clock::*; +pub const LOCAL_BRANCH_REPLICA_ID: u16 = u16::MAX; + /// A unique identifier for each distributed node. pub type ReplicaId = u16; @@ -25,7 +27,10 @@ pub struct Lamport { /// A [vector clock](https://en.wikipedia.org/wiki/Vector_clock). 
#[derive(Clone, Default, Hash, Eq, PartialEq)] -pub struct Global(SmallVec<[u32; 8]>); +pub struct Global { + values: SmallVec<[u32; 8]>, + local_branch_value: u32, +} impl Global { pub fn new() -> Self { @@ -33,41 +38,51 @@ impl Global { } pub fn get(&self, replica_id: ReplicaId) -> Seq { - self.0.get(replica_id as usize).copied().unwrap_or(0) as Seq + if replica_id == LOCAL_BRANCH_REPLICA_ID { + self.local_branch_value + } else { + self.values.get(replica_id as usize).copied().unwrap_or(0) as Seq + } } pub fn observe(&mut self, timestamp: Lamport) { if timestamp.value > 0 { - let new_len = timestamp.replica_id as usize + 1; - if new_len > self.0.len() { - self.0.resize(new_len, 0); + if timestamp.replica_id == LOCAL_BRANCH_REPLICA_ID { + self.local_branch_value = cmp::max(self.local_branch_value, timestamp.value); + } else { + let new_len = timestamp.replica_id as usize + 1; + if new_len > self.values.len() { + self.values.resize(new_len, 0); + } + + let entry = &mut self.values[timestamp.replica_id as usize]; + *entry = cmp::max(*entry, timestamp.value); } - - let entry = &mut self.0[timestamp.replica_id as usize]; - *entry = cmp::max(*entry, timestamp.value); } } pub fn join(&mut self, other: &Self) { - if other.0.len() > self.0.len() { - self.0.resize(other.0.len(), 0); + if other.values.len() > self.values.len() { + self.values.resize(other.values.len(), 0); } - for (left, right) in self.0.iter_mut().zip(&other.0) { + for (left, right) in self.values.iter_mut().zip(&other.values) { *left = cmp::max(*left, *right); } + + self.local_branch_value = cmp::max(self.local_branch_value, other.local_branch_value); } pub fn meet(&mut self, other: &Self) { - if other.0.len() > self.0.len() { - self.0.resize(other.0.len(), 0); + if other.values.len() > self.values.len() { + self.values.resize(other.values.len(), 0); } let mut new_len = 0; for (ix, (left, right)) in self - .0 + .values .iter_mut() - .zip(other.0.iter().chain(iter::repeat(&0))) + .zip(other.values.iter().chain(iter::repeat(&0))) .enumerate() { if *left == 0 { @@ -80,7 +95,8 @@ impl Global { new_len = ix + 1; } } - self.0.resize(new_len, 0); + self.values.resize(new_len, 0); + self.local_branch_value = cmp::min(self.local_branch_value, other.local_branch_value); } pub fn observed(&self, timestamp: Lamport) -> bool { @@ -88,34 +104,44 @@ impl Global { } pub fn observed_any(&self, other: &Self) -> bool { - self.0 + self.values .iter() - .zip(other.0.iter()) + .zip(other.values.iter()) .any(|(left, right)| *right > 0 && left >= right) + || (other.local_branch_value > 0 && self.local_branch_value >= other.local_branch_value) } pub fn observed_all(&self, other: &Self) -> bool { - let mut rhs = other.0.iter(); - self.0.iter().all(|left| match rhs.next() { + let mut rhs = other.values.iter(); + self.values.iter().all(|left| match rhs.next() { Some(right) => left >= right, None => true, }) && rhs.next().is_none() + && self.local_branch_value >= other.local_branch_value } pub fn changed_since(&self, other: &Self) -> bool { - self.0.len() > other.0.len() + self.values.len() > other.values.len() || self - .0 + .values .iter() - .zip(other.0.iter()) + .zip(other.values.iter()) .any(|(left, right)| left > right) + || self.local_branch_value > other.local_branch_value } pub fn iter(&self) -> impl Iterator + '_ { - self.0.iter().enumerate().map(|(replica_id, seq)| Lamport { - replica_id: replica_id as ReplicaId, - value: *seq, - }) + self.values + .iter() + .enumerate() + .map(|(replica_id, seq)| Lamport { + replica_id: replica_id as ReplicaId, + 
value: *seq, + }) + .chain((self.local_branch_value > 0).then_some(Lamport { + replica_id: LOCAL_BRANCH_REPLICA_ID, + value: self.local_branch_value, + })) } } @@ -190,7 +216,11 @@ impl fmt::Debug for Global { if timestamp.replica_id > 0 { write!(f, ", ")?; } - write!(f, "{}: {}", timestamp.replica_id, timestamp.value)?; + if timestamp.replica_id == LOCAL_BRANCH_REPLICA_ID { + write!(f, ": {}", timestamp.value)?; + } else { + write!(f, "{}: {}", timestamp.replica_id, timestamp.value)?; + } } write!(f, "}}") } diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index f8ba847ab2b9c6..ad2c0136681936 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -28,10 +28,11 @@ axum = { version = "0.6", features = ["json", "headers", "ws"] } axum-extra = { version = "0.4", features = ["erased-json"] } base64.workspace = true chrono.workspace = true -clock.workspace = true clickhouse.workspace = true +clock.workspace = true collections.workspace = true dashmap.workspace = true +derive_more.workspace = true envy = "0.4.2" futures.workspace = true google_ai.workspace = true @@ -42,13 +43,14 @@ live_kit_server.workspace = true log.workspace = true nanoid.workspace = true open_ai.workspace = true -supermaven_api.workspace = true parking_lot.workspace = true prometheus = "0.13" prost.workspace = true rand.workspace = true reqwest = { version = "0.11", features = ["json"] } +reqwest_client.workspace = true rpc.workspace = true +rustc-demangle.workspace = true scrypt = "0.11" sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] } semantic_version.workspace = true @@ -60,12 +62,12 @@ sha2.workspace = true sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] } strum.workspace = true subtle.workspace = true -rustc-demangle.workspace = true +supermaven_api.workspace = true telemetry_events.workspace = true text.workspace = true thiserror.workspace = true time.workspace = true -tokio.workspace = true +tokio = { workspace = true, features = ["full"] } toml.workspace = true tower = "0.4" tower-http = { workspace = true, features = ["trace"] } @@ -84,6 +86,7 @@ client = { workspace = true, features = ["test-support"] } collab_ui = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } ctor.workspace = true +dev_server_projects.workspace = true editor = { workspace = true, features = ["test-support"] } env_logger.workspace = true file_finder.workspace = true @@ -91,6 +94,7 @@ fs = { workspace = true, features = ["test-support"] } git = { workspace = true, features = ["test-support"] } git_hosting_providers.workspace = true gpui = { workspace = true, features = ["test-support"] } +headless.workspace = true hyper.workspace = true indoc.workspace = true language = { workspace = true, features = ["test-support"] } @@ -107,7 +111,6 @@ recent_projects = { workspace = true } release_channel.workspace = true remote = { workspace = true, features = ["test-support"] } remote_server.workspace = true -dev_server_projects.workspace = true rpc = { workspace = true, features = ["test-support"] } sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-sqlite"] } serde_json.workspace = true @@ -119,7 +122,6 @@ unindent.workspace = true util.workspace = true workspace = { workspace = true, features = ["test-support"] } worktree = { workspace = true, features = ["test-support"] } -headless.workspace = true 
[package.metadata.cargo-machete] ignored = ["async-stripe"] diff --git a/crates/collab/README.md b/crates/collab/README.md index 345e82aefed78f..5aa964ee792fee 100644 --- a/crates/collab/README.md +++ b/crates/collab/README.md @@ -23,8 +23,7 @@ To use a different set of admin users, create `crates/collab/seed.json`. ```json { "admins": ["yourgithubhere"], - "channels": ["zed"], - "number_of_users": 20 + "channels": ["zed"] } ``` diff --git a/crates/collab/k8s/collab.template.yml b/crates/collab/k8s/collab.template.yml index dcd935166a7de7..a28c685f5e8cbc 100644 --- a/crates/collab/k8s/collab.template.yml +++ b/crates/collab/k8s/collab.template.yml @@ -149,16 +149,6 @@ spec: secretKeyRef: name: google-ai key: api_key - - name: QWEN2_7B_API_KEY - valueFrom: - secretKeyRef: - name: hugging-face - key: api_key - - name: QWEN2_7B_API_URL - valueFrom: - secretKeyRef: - name: hugging-face - key: qwen2_api_url - name: BLOB_STORE_ACCESS_KEY valueFrom: secretKeyRef: @@ -209,6 +199,12 @@ spec: secretKeyRef: name: slack key: panics_webhook + - name: STRIPE_API_KEY + valueFrom: + secretKeyRef: + name: stripe + key: api_key + optional: true - name: COMPLETE_WITH_LANGUAGE_MODEL_RATE_LIMIT_PER_HOUR value: "1000" - name: SUPERMAVEN_ADMIN_API_KEY diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index 5c2c3961600acd..7d8bd8eb1b7d7d 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -112,6 +112,7 @@ CREATE TABLE "worktree_settings_files" ( "worktree_id" INTEGER NOT NULL, "path" VARCHAR NOT NULL, "content" TEXT, + "kind" VARCHAR, PRIMARY KEY(project_id, worktree_id, path), FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE ); @@ -421,6 +422,15 @@ CREATE TABLE dev_server_projects ( paths TEXT NOT NULL ); +CREATE TABLE IF NOT EXISTS billing_preferences ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + user_id INTEGER NOT NULL REFERENCES users(id), + max_monthly_llm_usage_spending_in_cents INTEGER NOT NULL +); + +CREATE UNIQUE INDEX "uix_billing_preferences_on_user_id" ON billing_preferences (user_id); + CREATE TABLE IF NOT EXISTS billing_customers ( id INTEGER PRIMARY KEY AUTOINCREMENT, created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, diff --git a/crates/collab/migrations/20241002120231_add_local_settings_kind.sql b/crates/collab/migrations/20241002120231_add_local_settings_kind.sql new file mode 100644 index 00000000000000..aec4ffb8f8519b --- /dev/null +++ b/crates/collab/migrations/20241002120231_add_local_settings_kind.sql @@ -0,0 +1 @@ +ALTER TABLE "worktree_settings_files" ADD COLUMN "kind" VARCHAR; diff --git a/crates/collab/migrations/20241009190639_add_billing_preferences.sql b/crates/collab/migrations/20241009190639_add_billing_preferences.sql new file mode 100644 index 00000000000000..9aa5a1a303668e --- /dev/null +++ b/crates/collab/migrations/20241009190639_add_billing_preferences.sql @@ -0,0 +1,8 @@ +create table if not exists billing_preferences ( + id serial primary key, + created_at timestamp without time zone not null default now(), + user_id integer not null references users(id) on delete cascade, + max_monthly_llm_usage_spending_in_cents integer not null +); + +create unique index "uix_billing_preferences_on_user_id" on billing_preferences (user_id); diff --git 
a/crates/collab/migrations_llm/20241007173634_add_cache_token_counts.sql b/crates/collab/migrations_llm/20241007173634_add_cache_token_counts.sql new file mode 100644 index 00000000000000..855e46ab0224dc --- /dev/null +++ b/crates/collab/migrations_llm/20241007173634_add_cache_token_counts.sql @@ -0,0 +1,11 @@ +alter table models + add column price_per_million_cache_creation_input_tokens integer not null default 0, + add column price_per_million_cache_read_input_tokens integer not null default 0; + +alter table usages + add column cache_creation_input_tokens_this_month bigint not null default 0, + add column cache_read_input_tokens_this_month bigint not null default 0; + +alter table lifetime_usages + add column cache_creation_input_tokens bigint not null default 0, + add column cache_read_input_tokens bigint not null default 0; diff --git a/crates/collab/migrations_llm/20241007220716_drop_incorrect_usages_columns.sql b/crates/collab/migrations_llm/20241007220716_drop_incorrect_usages_columns.sql new file mode 100644 index 00000000000000..c204451b7538d8 --- /dev/null +++ b/crates/collab/migrations_llm/20241007220716_drop_incorrect_usages_columns.sql @@ -0,0 +1,3 @@ +alter table usages + drop column cache_creation_input_tokens_this_month, + drop column cache_read_input_tokens_this_month; diff --git a/crates/collab/migrations_llm/20241008155620_create_monthly_usages.sql b/crates/collab/migrations_llm/20241008155620_create_monthly_usages.sql new file mode 100644 index 00000000000000..2733552a3a16f2 --- /dev/null +++ b/crates/collab/migrations_llm/20241008155620_create_monthly_usages.sql @@ -0,0 +1,13 @@ +create table monthly_usages ( + id serial primary key, + user_id integer not null, + model_id integer not null references models (id) on delete cascade, + month integer not null, + year integer not null, + input_tokens bigint not null default 0, + cache_creation_input_tokens bigint not null default 0, + cache_read_input_tokens bigint not null default 0, + output_tokens bigint not null default 0 +); + +create unique index uix_monthly_usages_on_user_id_model_id_month_year on monthly_usages (user_id, model_id, month, year); diff --git a/crates/collab/migrations_llm/20241010151249_create_billing_events.sql b/crates/collab/migrations_llm/20241010151249_create_billing_events.sql new file mode 100644 index 00000000000000..74a270872e5f66 --- /dev/null +++ b/crates/collab/migrations_llm/20241010151249_create_billing_events.sql @@ -0,0 +1,12 @@ +create table billing_events ( + id serial primary key, + idempotency_key uuid not null default gen_random_uuid(), + user_id integer not null, + model_id integer not null references models (id) on delete cascade, + input_tokens bigint not null default 0, + input_cache_creation_tokens bigint not null default 0, + input_cache_read_tokens bigint not null default 0, + output_tokens bigint not null default 0 +); + +create index uix_billing_events_on_user_id_model_id on billing_events (user_id, model_id); diff --git a/crates/collab/seed.default.json b/crates/collab/seed.default.json index 1abec644beed9f..dee924e103d620 100644 --- a/crates/collab/seed.default.json +++ b/crates/collab/seed.default.json @@ -8,6 +8,5 @@ "JosephTLyons", "rgbkrk" ], - "channels": ["zed"], - "number_of_users": 100 + "channels": ["zed"] } diff --git a/crates/collab/seed/github_users.json b/crates/collab/seed/github_users.json new file mode 100644 index 00000000000000..88acd6aa54a709 --- /dev/null +++ b/crates/collab/seed/github_users.json @@ -0,0 +1,602 @@ +[ + { + "id": 1, + "login": 
"mojombo", + "email": "tom@mojombo.com", + "created_at": "2007-10-20T05:24:19Z" + }, + { + "id": 2, + "login": "defunkt", + "email": null, + "created_at": "2007-10-20T05:24:19Z" + }, + { + "id": 3, + "login": "pjhyett", + "email": "pj@hyett.com", + "created_at": "2008-01-07T17:54:22Z" + }, + { + "id": 4, + "login": "wycats", + "email": "wycats@gmail.com", + "created_at": "2008-01-12T05:38:33Z" + }, + { + "id": 5, + "login": "ezmobius", + "email": null, + "created_at": "2008-01-12T07:51:46Z" + }, + { + "id": 6, + "login": "ivey", + "email": "ivey@gweezlebur.com", + "created_at": "2008-01-12T15:15:00Z" + }, + { + "id": 7, + "login": "evanphx", + "email": "evan@phx.io", + "created_at": "2008-01-12T16:46:24Z" + }, + { + "id": 17, + "login": "vanpelt", + "email": "vanpelt@wandb.com", + "created_at": "2008-01-13T05:57:18Z" + }, + { + "id": 18, + "login": "wayneeseguin", + "email": "wayneeseguin@gmail.com", + "created_at": "2008-01-13T06:02:21Z" + }, + { + "id": 19, + "login": "brynary", + "email": null, + "created_at": "2008-01-13T10:19:47Z" + }, + { + "id": 20, + "login": "kevinclark", + "email": "kevin.clark@gmail.com", + "created_at": "2008-01-13T18:33:26Z" + }, + { + "id": 21, + "login": "technoweenie", + "email": "technoweenie@hey.com", + "created_at": "2008-01-14T04:33:35Z" + }, + { + "id": 22, + "login": "macournoyer", + "email": "macournoyer@gmail.com", + "created_at": "2008-01-14T10:49:35Z" + }, + { + "id": 23, + "login": "takeo", + "email": "toby@takeo.email", + "created_at": "2008-01-14T11:25:49Z" + }, + { + "id": 25, + "login": "caged", + "email": "encytemedia@gmail.com", + "created_at": "2008-01-15T04:47:24Z" + }, + { + "id": 26, + "login": "topfunky", + "email": null, + "created_at": "2008-01-15T05:40:05Z" + }, + { + "id": 27, + "login": "anotherjesse", + "email": "anotherjesse@gmail.com", + "created_at": "2008-01-15T07:49:30Z" + }, + { + "id": 28, + "login": "roland", + "email": null, + "created_at": "2008-01-15T08:12:51Z" + }, + { + "id": 29, + "login": "lukas", + "email": "lukas@wandb.com", + "created_at": "2008-01-15T12:50:02Z" + }, + { + "id": 30, + "login": "fanvsfan", + "email": null, + "created_at": "2008-01-15T14:15:23Z" + }, + { + "id": 31, + "login": "tomtt", + "email": null, + "created_at": "2008-01-15T15:44:31Z" + }, + { + "id": 32, + "login": "railsjitsu", + "email": null, + "created_at": "2008-01-16T04:57:23Z" + }, + { + "id": 34, + "login": "nitay", + "email": null, + "created_at": "2008-01-18T14:09:11Z" + }, + { + "id": 35, + "login": "kevwil", + "email": null, + "created_at": "2008-01-19T05:50:12Z" + }, + { + "id": 36, + "login": "KirinDave", + "email": null, + "created_at": "2008-01-19T08:01:02Z" + }, + { + "id": 37, + "login": "jamesgolick", + "email": "jamesgolick@gmail.com", + "created_at": "2008-01-19T22:52:30Z" + }, + { + "id": 38, + "login": "atmos", + "email": "atmos@atmos.org", + "created_at": "2008-01-22T09:14:11Z" + }, + { + "id": 44, + "login": "errfree", + "email": null, + "created_at": "2008-01-24T02:08:37Z" + }, + { + "id": 45, + "login": "mojodna", + "email": null, + "created_at": "2008-01-24T04:40:22Z" + }, + { + "id": 46, + "login": "bmizerany", + "email": "blake.mizerany@gmail.com", + "created_at": "2008-01-24T04:44:30Z" + }, + { + "id": 47, + "login": "jnewland", + "email": "jesse@jnewland.com", + "created_at": "2008-01-25T02:28:12Z" + }, + { + "id": 48, + "login": "joshknowles", + "email": "joshknowles@gmail.com", + "created_at": "2008-01-25T21:30:42Z" + }, + { + "id": 49, + "login": "hornbeck", + "email": "hornbeck@gmail.com", + "created_at": 
"2008-01-25T21:49:23Z" + }, + { + "id": 50, + "login": "jwhitmire", + "email": "jeff@jwhitmire.com", + "created_at": "2008-01-25T22:07:48Z" + }, + { + "id": 51, + "login": "elbowdonkey", + "email": null, + "created_at": "2008-01-25T22:08:20Z" + }, + { + "id": 52, + "login": "reinh", + "email": null, + "created_at": "2008-01-25T22:16:29Z" + }, + { + "id": 53, + "login": "knzai", + "email": "git@knz.ai", + "created_at": "2008-01-25T22:33:10Z" + }, + { + "id": 68, + "login": "bs", + "email": "yap@bri.tt", + "created_at": "2008-01-27T01:46:29Z" + }, + { + "id": 69, + "login": "rsanheim", + "email": null, + "created_at": "2008-01-27T07:09:47Z" + }, + { + "id": 70, + "login": "schacon", + "email": "schacon@gmail.com", + "created_at": "2008-01-27T17:19:28Z" + }, + { + "id": 71, + "login": "uggedal", + "email": null, + "created_at": "2008-01-27T22:18:57Z" + }, + { + "id": 72, + "login": "bruce", + "email": "brwcodes@gmail.com", + "created_at": "2008-01-28T07:16:45Z" + }, + { + "id": 73, + "login": "sam", + "email": "ssmoot@gmail.com", + "created_at": "2008-01-28T19:01:26Z" + }, + { + "id": 74, + "login": "mmower", + "email": "self@mattmower.com", + "created_at": "2008-01-28T19:47:50Z" + }, + { + "id": 75, + "login": "abhay", + "email": null, + "created_at": "2008-01-28T21:08:23Z" + }, + { + "id": 76, + "login": "rabble", + "email": "evan@protest.net", + "created_at": "2008-01-28T23:27:02Z" + }, + { + "id": 77, + "login": "benburkert", + "email": "ben@benburkert.com", + "created_at": "2008-01-28T23:44:14Z" + }, + { + "id": 78, + "login": "indirect", + "email": "andre@arko.net", + "created_at": "2008-01-29T07:59:27Z" + }, + { + "id": 79, + "login": "fearoffish", + "email": "me@fearof.fish", + "created_at": "2008-01-29T08:43:10Z" + }, + { + "id": 80, + "login": "ry", + "email": "ry@tinyclouds.org", + "created_at": "2008-01-29T08:50:34Z" + }, + { + "id": 81, + "login": "engineyard", + "email": null, + "created_at": "2008-01-29T09:51:30Z" + }, + { + "id": 82, + "login": "jsierles", + "email": null, + "created_at": "2008-01-29T11:10:25Z" + }, + { + "id": 83, + "login": "tweibley", + "email": null, + "created_at": "2008-01-29T13:52:07Z" + }, + { + "id": 84, + "login": "peimei", + "email": "james@railsjitsu.com", + "created_at": "2008-01-29T15:44:11Z" + }, + { + "id": 85, + "login": "brixen", + "email": "brixen@gmail.com", + "created_at": "2008-01-29T16:47:55Z" + }, + { + "id": 87, + "login": "tmornini", + "email": null, + "created_at": "2008-01-29T18:43:39Z" + }, + { + "id": 88, + "login": "outerim", + "email": "lee@outerim.com", + "created_at": "2008-01-29T18:48:32Z" + }, + { + "id": 89, + "login": "daksis", + "email": null, + "created_at": "2008-01-29T19:18:16Z" + }, + { + "id": 90, + "login": "sr", + "email": "me@simonrozet.com", + "created_at": "2008-01-29T20:37:53Z" + }, + { + "id": 91, + "login": "lifo", + "email": null, + "created_at": "2008-01-29T23:09:30Z" + }, + { + "id": 92, + "login": "rsl", + "email": "sconds@gmail.com", + "created_at": "2008-01-29T23:13:36Z" + }, + { + "id": 93, + "login": "imownbey", + "email": null, + "created_at": "2008-01-29T23:13:44Z" + }, + { + "id": 94, + "login": "dylanegan", + "email": null, + "created_at": "2008-01-29T23:15:18Z" + }, + { + "id": 95, + "login": "jm", + "email": "jeremymcanally@gmail.com", + "created_at": "2008-01-29T23:15:32Z" + }, + { + "id": 100, + "login": "kmarsh", + "email": "kevin.marsh@gmail.com", + "created_at": "2008-01-29T23:48:24Z" + }, + { + "id": 101, + "login": "jvantuyl", + "email": "jayson@aggressive.ly", + "created_at": 
"2008-01-30T01:11:50Z" + }, + { + "id": 102, + "login": "BrianTheCoder", + "email": "wbsmith83@gmail.com", + "created_at": "2008-01-30T02:22:32Z" + }, + { + "id": 103, + "login": "freeformz", + "email": "freeformz@gmail.com", + "created_at": "2008-01-30T06:19:57Z" + }, + { + "id": 104, + "login": "hassox", + "email": "dneighman@gmail.com", + "created_at": "2008-01-30T06:31:06Z" + }, + { + "id": 105, + "login": "automatthew", + "email": "automatthew@gmail.com", + "created_at": "2008-01-30T19:00:58Z" + }, + { + "id": 106, + "login": "queso", + "email": "Joshua.owens@gmail.com", + "created_at": "2008-01-30T19:48:45Z" + }, + { + "id": 107, + "login": "lancecarlson", + "email": null, + "created_at": "2008-01-30T19:53:29Z" + }, + { + "id": 108, + "login": "drnic", + "email": "drnicwilliams@gmail.com", + "created_at": "2008-01-30T23:19:18Z" + }, + { + "id": 109, + "login": "lukesutton", + "email": null, + "created_at": "2008-01-31T04:01:02Z" + }, + { + "id": 110, + "login": "danwrong", + "email": null, + "created_at": "2008-01-31T08:51:31Z" + }, + { + "id": 111, + "login": "HamptonMakes", + "email": "hampton@hamptoncatlin.com", + "created_at": "2008-01-31T17:03:51Z" + }, + { + "id": 112, + "login": "jfrost", + "email": null, + "created_at": "2008-01-31T22:14:27Z" + }, + { + "id": 113, + "login": "mattetti", + "email": null, + "created_at": "2008-01-31T22:56:31Z" + }, + { + "id": 114, + "login": "ctennis", + "email": "c@leb.tennis", + "created_at": "2008-01-31T23:43:14Z" + }, + { + "id": 115, + "login": "lawrencepit", + "email": "lawrence.pit@gmail.com", + "created_at": "2008-01-31T23:57:16Z" + }, + { + "id": 116, + "login": "marcjeanson", + "email": "github@marcjeanson.com", + "created_at": "2008-02-01T01:27:19Z" + }, + { + "id": 117, + "login": "grempe", + "email": null, + "created_at": "2008-02-01T04:12:42Z" + }, + { + "id": 118, + "login": "peterc", + "email": "git@peterc.org", + "created_at": "2008-02-02T01:00:36Z" + }, + { + "id": 119, + "login": "ministrycentered", + "email": null, + "created_at": "2008-02-02T03:50:26Z" + }, + { + "id": 120, + "login": "afarnham", + "email": null, + "created_at": "2008-02-02T05:11:03Z" + }, + { + "id": 121, + "login": "up_the_irons", + "email": null, + "created_at": "2008-02-02T10:59:51Z" + }, + { + "id": 122, + "login": "cristibalan", + "email": "cristibalan@gmail.com", + "created_at": "2008-02-02T11:29:45Z" + }, + { + "id": 123, + "login": "heavysixer", + "email": null, + "created_at": "2008-02-02T15:06:53Z" + }, + { + "id": 124, + "login": "brosner", + "email": "brosner@gmail.com", + "created_at": "2008-02-02T19:03:54Z" + }, + { + "id": 125, + "login": "danielmorrison", + "email": "daniel@collectiveidea.com", + "created_at": "2008-02-02T19:46:35Z" + }, + { + "id": 126, + "login": "danielharan", + "email": "chebuctonian@gmail.com", + "created_at": "2008-02-02T21:42:21Z" + }, + { + "id": 127, + "login": "kvnsmth", + "email": null, + "created_at": "2008-02-02T22:00:03Z" + }, + { + "id": 128, + "login": "collectiveidea", + "email": "info@collectiveidea.com", + "created_at": "2008-02-02T22:34:46Z" + }, + { + "id": 129, + "login": "canadaduane", + "email": "duane.johnson@gmail.com", + "created_at": "2008-02-02T23:25:39Z" + }, + { + "id": 130, + "login": "corasaurus-hex", + "email": "cora@sutton.me", + "created_at": "2008-02-03T04:20:22Z" + }, + { + "id": 131, + "login": "dstrelau", + "email": null, + "created_at": "2008-02-03T14:59:12Z" + }, + { + "id": 132, + "login": "sunny", + "email": "sunny@sunfox.org", + "created_at": "2008-02-03T15:43:43Z" + }, + { + 
"id": 133, + "login": "dkubb", + "email": "github@dan.kubb.ca", + "created_at": "2008-02-03T20:40:13Z" + }, + { + "id": 134, + "login": "jnicklas", + "email": "jonas@jnicklas.com", + "created_at": "2008-02-03T20:43:50Z" + }, + { + "id": 135, + "login": "richcollins", + "email": "richcollins@gmail.com", + "created_at": "2008-02-03T21:11:25Z" + } +] diff --git a/crates/collab/src/api/billing.rs b/crates/collab/src/api/billing.rs index 23a16590cac630..5e167a668c4f57 100644 --- a/crates/collab/src/api/billing.rs +++ b/crates/collab/src/api/billing.rs @@ -1,7 +1,3 @@ -use std::str::FromStr; -use std::sync::Arc; -use std::time::Duration; - use anyhow::{anyhow, bail, Context}; use axum::{ extract::{self, Query}, @@ -9,29 +5,43 @@ use axum::{ Extension, Json, Router, }; use chrono::{DateTime, SecondsFormat, Utc}; +use collections::HashSet; use reqwest::StatusCode; use sea_orm::ActiveValue; use serde::{Deserialize, Serialize}; +use std::{str::FromStr, sync::Arc, time::Duration}; use stripe::{ - BillingPortalSession, CheckoutSession, CreateBillingPortalSession, - CreateBillingPortalSessionFlowData, CreateBillingPortalSessionFlowDataAfterCompletion, + BillingPortalSession, CreateBillingPortalSession, CreateBillingPortalSessionFlowData, + CreateBillingPortalSessionFlowDataAfterCompletion, CreateBillingPortalSessionFlowDataAfterCompletionRedirect, - CreateBillingPortalSessionFlowDataType, CreateCheckoutSession, CreateCheckoutSessionLineItems, - CreateCustomer, Customer, CustomerId, EventObject, EventType, Expandable, ListEvents, - Subscription, SubscriptionId, SubscriptionStatus, + CreateBillingPortalSessionFlowDataType, CreateCustomer, Customer, CustomerId, EventObject, + EventType, Expandable, ListEvents, Subscription, SubscriptionId, SubscriptionStatus, }; use util::ResultExt; -use crate::db::billing_subscription::StripeSubscriptionStatus; -use crate::db::{ - billing_customer, BillingSubscriptionId, CreateBillingCustomerParams, - CreateBillingSubscriptionParams, CreateProcessedStripeEventParams, UpdateBillingCustomerParams, - UpdateBillingSubscriptionParams, +use crate::llm::{DEFAULT_MAX_MONTHLY_SPEND, FREE_TIER_MONTHLY_SPENDING_LIMIT}; +use crate::rpc::{ResultExt as _, Server}; +use crate::{ + db::{ + billing_customer, BillingSubscriptionId, CreateBillingCustomerParams, + CreateBillingSubscriptionParams, CreateProcessedStripeEventParams, + UpdateBillingCustomerParams, UpdateBillingPreferencesParams, + UpdateBillingSubscriptionParams, + }, + stripe_billing::StripeBilling, +}; +use crate::{ + db::{billing_subscription::StripeSubscriptionStatus, UserId}, + llm::db::LlmDatabase, }; use crate::{AppState, Error, Result}; pub fn router() -> Router { Router::new() + .route( + "/billing/preferences", + get(get_billing_preferences).put(update_billing_preferences), + ) .route( "/billing/subscriptions", get(list_billing_subscriptions).post(create_billing_subscription), @@ -40,6 +50,86 @@ pub fn router() -> Router { "/billing/subscriptions/manage", post(manage_billing_subscription), ) + .route("/billing/monthly_spend", get(get_monthly_spend)) +} + +#[derive(Debug, Deserialize)] +struct GetBillingPreferencesParams { + github_user_id: i32, +} + +#[derive(Debug, Serialize)] +struct BillingPreferencesResponse { + max_monthly_llm_usage_spending_in_cents: i32, +} + +async fn get_billing_preferences( + Extension(app): Extension>, + Query(params): Query, +) -> Result> { + let user = app + .db + .get_user_by_github_user_id(params.github_user_id) + .await? 
+ .ok_or_else(|| anyhow!("user not found"))?; + + let preferences = app.db.get_billing_preferences(user.id).await?; + + Ok(Json(BillingPreferencesResponse { + max_monthly_llm_usage_spending_in_cents: preferences + .map_or(DEFAULT_MAX_MONTHLY_SPEND.0 as i32, |preferences| { + preferences.max_monthly_llm_usage_spending_in_cents + }), + })) +} + +#[derive(Debug, Deserialize)] +struct UpdateBillingPreferencesBody { + github_user_id: i32, + max_monthly_llm_usage_spending_in_cents: i32, +} + +async fn update_billing_preferences( + Extension(app): Extension>, + Extension(rpc_server): Extension>, + extract::Json(body): extract::Json, +) -> Result> { + let user = app + .db + .get_user_by_github_user_id(body.github_user_id) + .await? + .ok_or_else(|| anyhow!("user not found"))?; + + let billing_preferences = + if let Some(_billing_preferences) = app.db.get_billing_preferences(user.id).await? { + app.db + .update_billing_preferences( + user.id, + &UpdateBillingPreferencesParams { + max_monthly_llm_usage_spending_in_cents: ActiveValue::set( + body.max_monthly_llm_usage_spending_in_cents, + ), + }, + ) + .await? + } else { + app.db + .create_billing_preferences( + user.id, + &crate::db::CreateBillingPreferencesParams { + max_monthly_llm_usage_spending_in_cents: body + .max_monthly_llm_usage_spending_in_cents, + }, + ) + .await? + }; + + rpc_server.refresh_llm_tokens_for_user(user.id).await; + + Ok(Json(BillingPreferencesResponse { + max_monthly_llm_usage_spending_in_cents: billing_preferences + .max_monthly_llm_usage_spending_in_cents, + })) } #[derive(Debug, Deserialize)] @@ -79,7 +169,7 @@ async fn list_billing_subscriptions( .into_iter() .map(|subscription| BillingSubscriptionJson { id: subscription.id, - name: "Zed Pro".to_string(), + name: "Zed LLM Usage".to_string(), status: subscription.stripe_subscription_status, cancel_at: subscription.stripe_cancel_at.map(|cancel_at| { cancel_at @@ -114,12 +204,22 @@ async fn create_billing_subscription( .await? .ok_or_else(|| anyhow!("user not found"))?; - let Some((stripe_client, stripe_price_id)) = app - .stripe_client - .clone() - .zip(app.config.stripe_price_id.clone()) - else { - log::error!("failed to retrieve Stripe client or price ID"); + let Some(stripe_client) = app.stripe_client.clone() else { + log::error!("failed to retrieve Stripe client"); + Err(Error::http( + StatusCode::NOT_IMPLEMENTED, + "not supported".into(), + ))? + }; + let Some(stripe_billing) = app.stripe_billing.clone() else { + log::error!("failed to retrieve Stripe billing object"); + Err(Error::http( + StatusCode::NOT_IMPLEMENTED, + "not supported".into(), + ))? + }; + let Some(llm_db) = app.llm_db.clone() else { + log::error!("failed to retrieve LLM database"); Err(Error::http( StatusCode::NOT_IMPLEMENTED, "not supported".into(), @@ -143,26 +243,14 @@ async fn create_billing_subscription( customer.id }; - let checkout_session = { - let mut params = CreateCheckoutSession::new(); - params.mode = Some(stripe::CheckoutSessionMode::Subscription); - params.customer = Some(customer_id); - params.client_reference_id = Some(user.github_login.as_str()); - params.line_items = Some(vec![CreateCheckoutSessionLineItems { - price: Some(stripe_price_id.to_string()), - quantity: Some(1), - ..Default::default() - }]); - let success_url = format!("{}/account", app.config.zed_dot_dev_url()); - params.success_url = Some(&success_url); - - CheckoutSession::create(&stripe_client, params).await? 
- }; - + let default_model = llm_db.model(rpc::LanguageModelProvider::Anthropic, "claude-3-5-sonnet")?; + let stripe_model = stripe_billing.register_model(default_model).await?; + let success_url = format!("{}/account", app.config.zed_dot_dev_url()); + let checkout_session_url = stripe_billing + .checkout(customer_id, &user.github_login, &stripe_model, &success_url) + .await?; Ok(Json(CreateBillingSubscriptionResponse { - checkout_session_url: checkout_session - .url - .ok_or_else(|| anyhow!("no checkout session URL"))?, + checkout_session_url, })) } @@ -317,7 +405,7 @@ const NUMBER_OF_ALREADY_PROCESSED_PAGES_BEFORE_WE_STOP: usize = 4; /// Polls the Stripe events API periodically to reconcile the records in our /// database with the data in Stripe. -pub fn poll_stripe_events_periodically(app: Arc) { +pub fn poll_stripe_events_periodically(app: Arc, rpc_server: Arc) { let Some(stripe_client) = app.stripe_client.clone() else { log::warn!("failed to retrieve Stripe client"); return; @@ -328,7 +416,9 @@ pub fn poll_stripe_events_periodically(app: Arc) { let executor = executor.clone(); async move { loop { - poll_stripe_events(&app, &stripe_client).await.log_err(); + poll_stripe_events(&app, &rpc_server, &stripe_client) + .await + .log_err(); executor.sleep(POLL_EVENTS_INTERVAL).await; } @@ -338,6 +428,7 @@ pub fn poll_stripe_events_periodically(app: Arc) { async fn poll_stripe_events( app: &Arc, + rpc_server: &Arc, stripe_client: &stripe::Client, ) -> anyhow::Result<()> { fn event_type_to_string(event_type: EventType) -> String { @@ -362,29 +453,28 @@ async fn poll_stripe_events( let mut pages_of_already_processed_events = 0; let mut unprocessed_events = Vec::new(); - loop { - if pages_of_already_processed_events >= NUMBER_OF_ALREADY_PROCESSED_PAGES_BEFORE_WE_STOP { - log::info!("saw {pages_of_already_processed_events} pages of already-processed events: stopping event retrieval"); - break; - } - - log::info!("retrieving events from Stripe: {}", event_types.join(", ")); - - let mut params = ListEvents::new(); - params.types = Some(event_types.clone()); - params.limit = Some(EVENTS_LIMIT_PER_PAGE); + log::info!( + "Stripe events: starting retrieval for {}", + event_types.join(", ") + ); + let mut params = ListEvents::new(); + params.types = Some(event_types.clone()); + params.limit = Some(EVENTS_LIMIT_PER_PAGE); - let events = stripe::Event::list(stripe_client, ¶ms).await?; + let mut event_pages = stripe::Event::list(&stripe_client, ¶ms) + .await? + .paginate(params); + loop { let processed_event_ids = { - let event_ids = &events + let event_ids = event_pages + .page .data .iter() .map(|event| event.id.as_str()) .collect::>(); - app.db - .get_processed_stripe_events_by_event_ids(event_ids) + .get_processed_stripe_events_by_event_ids(&event_ids) .await? 
.into_iter() .map(|event| event.stripe_event_id) @@ -392,13 +482,13 @@ async fn poll_stripe_events( }; let mut processed_events_in_page = 0; - let events_in_page = events.data.len(); - for event in events.data { + let events_in_page = event_pages.page.data.len(); + for event in &event_pages.page.data { if processed_event_ids.contains(&event.id.to_string()) { processed_events_in_page += 1; - log::debug!("Stripe event {} already processed: skipping", event.id); + log::debug!("Stripe events: already processed '{}', skipping", event.id); } else { - unprocessed_events.push(event); + unprocessed_events.push(event.clone()); } } @@ -406,15 +496,21 @@ async fn poll_stripe_events( pages_of_already_processed_events += 1; } - if !events.has_more { + if event_pages.page.has_more { + if pages_of_already_processed_events >= NUMBER_OF_ALREADY_PROCESSED_PAGES_BEFORE_WE_STOP + { + log::info!("Stripe events: stopping, saw {pages_of_already_processed_events} pages of already-processed events"); + break; + } else { + log::info!("Stripe events: retrieving next page"); + event_pages = event_pages.next(&stripe_client).await?; + } + } else { break; } } - log::info!( - "unprocessed events from Stripe: {}", - unprocessed_events.len() - ); + log::info!("Stripe events: unprocessed {}", unprocessed_events.len()); // Sort all of the unprocessed events in ascending order, so we can handle them in the order they occurred. unprocessed_events.sort_by(|a, b| a.created.cmp(&b.created).then_with(|| a.id.cmp(&b.id))); @@ -430,12 +526,12 @@ async fn poll_stripe_events( // If the event has happened too far in the past, we don't want to // process it and risk overwriting other more-recent updates. // - // 1 hour was chosen arbitrarily. This could be made longer or shorter. - let one_hour = Duration::from_secs(60 * 60); - let an_hour_ago = Utc::now() - one_hour; - if an_hour_ago.timestamp() > event.created { + // 1 day was chosen arbitrarily. This could be made longer or shorter. + let one_day = Duration::from_secs(24 * 60 * 60); + let a_day_ago = Utc::now() - one_day; + if a_day_ago.timestamp() > event.created { log::info!( - "Stripe event {} is more than {one_hour:?} old, marking as processed", + "Stripe events: event '{}' is more than {one_day:?} old, marking as processed", event_id ); app.db @@ -454,7 +550,7 @@ async fn poll_stripe_events( | EventType::CustomerSubscriptionPaused | EventType::CustomerSubscriptionResumed | EventType::CustomerSubscriptionDeleted => { - handle_customer_subscription_event(app, stripe_client, event).await + handle_customer_subscription_event(app, rpc_server, stripe_client, event).await } _ => Ok(()), }; @@ -522,6 +618,7 @@ async fn handle_customer_event( async fn handle_customer_subscription_event( app: &Arc, + rpc_server: &Arc, stripe_client: &stripe::Client, event: stripe::Event, ) -> anyhow::Result<()> { @@ -567,9 +664,52 @@ async fn handle_customer_subscription_event( .await?; } + // When the user's subscription changes, we want to refresh their LLM tokens + // to either grant/revoke access. + rpc_server + .refresh_llm_tokens_for_user(billing_customer.user_id) + .await; + Ok(()) } +#[derive(Debug, Deserialize)] +struct GetMonthlySpendParams { + github_user_id: i32, +} + +#[derive(Debug, Serialize)] +struct GetMonthlySpendResponse { + monthly_spend_in_cents: i32, +} + +async fn get_monthly_spend( + Extension(app): Extension>, + Query(params): Query, +) -> Result> { + let user = app + .db + .get_user_by_github_user_id(params.github_user_id) + .await? 
+ .ok_or_else(|| anyhow!("user not found"))?; + + let Some(llm_db) = app.llm_db.clone() else { + return Err(Error::http( + StatusCode::NOT_IMPLEMENTED, + "LLM database not available".into(), + )); + }; + + let monthly_spend = llm_db + .get_user_spending_for_month(user.id, Utc::now()) + .await? + .saturating_sub(FREE_TIER_MONTHLY_SPENDING_LIMIT); + + Ok(Json(GetMonthlySpendResponse { + monthly_spend_in_cents: monthly_spend.0 as i32, + })) +} + impl From for StripeSubscriptionStatus { fn from(value: SubscriptionStatus) -> Self { match value { @@ -631,3 +771,74 @@ async fn find_or_create_billing_customer( Ok(Some(billing_customer)) } + +const SYNC_LLM_USAGE_WITH_STRIPE_INTERVAL: Duration = Duration::from_secs(60); + +pub fn sync_llm_usage_with_stripe_periodically(app: Arc) { + let Some(stripe_billing) = app.stripe_billing.clone() else { + log::warn!("failed to retrieve Stripe billing object"); + return; + }; + let Some(llm_db) = app.llm_db.clone() else { + log::warn!("failed to retrieve LLM database"); + return; + }; + + let executor = app.executor.clone(); + executor.spawn_detached({ + let executor = executor.clone(); + async move { + loop { + sync_with_stripe(&app, &llm_db, &stripe_billing) + .await + .context("failed to sync LLM usage to Stripe") + .trace_err(); + executor.sleep(SYNC_LLM_USAGE_WITH_STRIPE_INTERVAL).await; + } + } + }); +} + +async fn sync_with_stripe( + app: &Arc, + llm_db: &Arc, + stripe_billing: &Arc, +) -> anyhow::Result<()> { + let events = llm_db.get_billing_events().await?; + let user_ids = events + .iter() + .map(|(event, _)| event.user_id) + .collect::>(); + let stripe_subscriptions = app.db.get_active_billing_subscriptions(user_ids).await?; + + for (event, model) in events { + let Some((stripe_db_customer, stripe_db_subscription)) = + stripe_subscriptions.get(&event.user_id) + else { + tracing::warn!( + user_id = event.user_id.0, + "Registered billing event for user who is not a Stripe customer. Billing events should only be created for users who are Stripe customers, so this is a mistake on our side." 
+ ); + continue; + }; + let stripe_subscription_id: stripe::SubscriptionId = stripe_db_subscription + .stripe_subscription_id + .parse() + .context("failed to parse stripe subscription id from db")?; + let stripe_customer_id: stripe::CustomerId = stripe_db_customer + .stripe_customer_id + .parse() + .context("failed to parse stripe customer id from db")?; + + let stripe_model = stripe_billing.register_model(&model).await?; + stripe_billing + .subscribe_to_model(&stripe_subscription_id, &stripe_model) + .await?; + stripe_billing + .bill_model_usage(&stripe_customer_id, &stripe_model, &event) + .await?; + llm_db.consume_billing_event(event.id).await?; + } + + Ok(()) +} diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 30ed10a76fb0c0..c1ab3d7939a075 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -18,12 +18,12 @@ use sha2::{Digest, Sha256}; use std::sync::{Arc, OnceLock}; use telemetry_events::{ ActionEvent, AppEvent, AssistantEvent, CallEvent, CpuEvent, EditEvent, EditorEvent, Event, - EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, ReplEvent, - SettingEvent, + EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, Panic, + ReplEvent, SettingEvent, }; use uuid::Uuid; -static CRASH_REPORTS_BUCKET: &str = "zed-crash-reports"; +const CRASH_REPORTS_BUCKET: &str = "zed-crash-reports"; pub fn router() -> Router { Router::new() @@ -149,7 +149,8 @@ pub async fn post_crash( installation_id = %installation_id, description = %description, backtrace = %summary, - "crash report"); + "crash report" + ); if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() { let payload = slack::WebhookBody::new(|w| { @@ -295,10 +296,11 @@ pub async fn post_panic( version = %panic.app_version, os_name = %panic.os_name, os_version = %panic.os_version.clone().unwrap_or_default(), - installation_id = %panic.installation_id.unwrap_or_default(), + installation_id = %panic.installation_id.clone().unwrap_or_default(), description = %panic.payload, backtrace = %panic.backtrace.join("\n"), - "panic report"); + "panic report" + ); let backtrace = if panic.backtrace.len() > 25 { let total = panic.backtrace.len(); @@ -316,6 +318,11 @@ pub async fn post_panic( } else { panic.backtrace.join("\n") }; + + if !report_to_slack(&panic) { + return Ok(()); + } + let backtrace_with_summary = panic.payload + "\n" + &backtrace; if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() { @@ -356,6 +363,25 @@ pub async fn post_panic( Ok(()) } +fn report_to_slack(panic: &Panic) -> bool { + if panic.payload.contains("ERROR_SURFACE_LOST_KHR") { + return false; + } + + if panic.payload.contains("ERROR_INITIALIZATION_FAILED") { + return false; + } + + if panic + .payload + .contains("GPU has crashed, and no debug information is available") + { + return false; + } + + true +} + pub async fn post_events( Extension(app): Extension>, TypedHeader(ZedChecksumHeader(checksum)): TypedHeader, @@ -403,8 +429,6 @@ pub async fn post_events( country_code.clone(), checksum_matched, )), - // Needed for clients sending old copilot_event types - Event::Copilot(_) => {} Event::InlineCompletion(event) => { to_upload .inline_completion_events @@ -627,7 +651,9 @@ where #[derive(Serialize, Debug, clickhouse::Row)] pub struct EditorEventRow { + system_id: String, installation_id: String, + session_id: Option, metrics_id: String, operation: String, app_version: String, @@ -644,14 +670,13 @@ pub struct 
EditorEventRow { time: i64, copilot_enabled: bool, copilot_enabled_for_language: bool, - historical_event: bool, architecture: String, is_staff: Option, - session_id: Option, major: Option, minor: Option, patch: Option, checksum_matched: bool, + is_via_ssh: bool, } impl EditorEventRow { @@ -677,9 +702,10 @@ impl EditorEventRow { os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), architecture: body.architecture.clone(), + system_id: body.system_id.clone().unwrap_or_default(), installation_id: body.installation_id.clone().unwrap_or_default(), - metrics_id: body.metrics_id.clone().unwrap_or_default(), session_id: body.session_id.clone(), + metrics_id: body.metrics_id.clone().unwrap_or_default(), is_staff: body.is_staff, time: time.timestamp_millis(), operation: event.operation, @@ -691,7 +717,7 @@ impl EditorEventRow { country_code: country_code.unwrap_or("XX".to_string()), region_code: "".to_string(), city: "".to_string(), - historical_event: false, + is_via_ssh: event.is_via_ssh, } } } @@ -699,6 +725,7 @@ impl EditorEventRow { #[derive(Serialize, Debug, clickhouse::Row)] pub struct InlineCompletionEventRow { installation_id: String, + session_id: Option, provider: String, suggestion_accepted: bool, app_version: String, @@ -713,7 +740,6 @@ pub struct InlineCompletionEventRow { city: String, time: i64, is_staff: Option, - session_id: Option, major: Option, minor: Option, patch: Option, @@ -834,6 +860,7 @@ pub struct AssistantEventRow { // AssistantEventRow conversation_id: String, kind: String, + phase: String, model: String, response_latency_in_ms: Option, error_message: Option, @@ -866,6 +893,7 @@ impl AssistantEventRow { time: time.timestamp_millis(), conversation_id: event.conversation_id.unwrap_or_default(), kind: event.kind.to_string(), + phase: event.phase.to_string(), model: event.model, response_latency_in_ms: event .response_latency @@ -878,6 +906,7 @@ impl AssistantEventRow { #[derive(Debug, clickhouse::Row, Serialize)] pub struct CpuEventRow { installation_id: Option, + session_id: Option, is_staff: Option, usage_as_percentage: f32, core_count: u32, @@ -886,7 +915,6 @@ pub struct CpuEventRow { os_name: String, os_version: String, time: i64, - session_id: Option, // pub normalized_cpu_usage: f64, MATERIALIZED major: Option, minor: Option, @@ -1233,6 +1261,7 @@ pub struct EditEventRow { period_start: i64, period_end: i64, environment: String, + is_via_ssh: bool, } impl EditEventRow { @@ -1266,6 +1295,7 @@ impl EditEventRow { period_start: period_start.timestamp_millis(), period_end: period_end.timestamp_millis(), environment: event.environment, + is_via_ssh: event.is_via_ssh, } } } diff --git a/crates/collab/src/cents.rs b/crates/collab/src/cents.rs new file mode 100644 index 00000000000000..defbcea4e26d39 --- /dev/null +++ b/crates/collab/src/cents.rs @@ -0,0 +1,80 @@ +/// A number of cents. 
+#[derive( + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Clone, + Copy, + derive_more::Add, + derive_more::AddAssign, + derive_more::Sub, + derive_more::SubAssign, +)] +pub struct Cents(pub u32); + +impl Cents { + pub const ZERO: Self = Self(0); + + pub const fn new(cents: u32) -> Self { + Self(cents) + } + + pub const fn from_dollars(dollars: u32) -> Self { + Self(dollars * 100) + } + + pub fn saturating_sub(self, other: Cents) -> Self { + Self(self.0.saturating_sub(other.0)) + } +} + +#[cfg(test)] +mod tests { + use pretty_assertions::assert_eq; + + use super::*; + + #[test] + fn test_cents_new() { + assert_eq!(Cents::new(50), Cents(50)); + } + + #[test] + fn test_cents_from_dollars() { + assert_eq!(Cents::from_dollars(1), Cents(100)); + assert_eq!(Cents::from_dollars(5), Cents(500)); + } + + #[test] + fn test_cents_zero() { + assert_eq!(Cents::ZERO, Cents(0)); + } + + #[test] + fn test_cents_add() { + assert_eq!(Cents(50) + Cents(30), Cents(80)); + } + + #[test] + fn test_cents_add_assign() { + let mut cents = Cents(50); + cents += Cents(30); + assert_eq!(cents, Cents(80)); + } + + #[test] + fn test_cents_saturating_sub() { + assert_eq!(Cents(50).saturating_sub(Cents(30)), Cents(20)); + assert_eq!(Cents(30).saturating_sub(Cents(50)), Cents(0)); + } + + #[test] + fn test_cents_ordering() { + assert!(Cents(50) > Cents(30)); + assert!(Cents(30) < Cents(50)); + assert_eq!(Cents(50), Cents(50)); + } +} diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 5c30a857389241..e9665484931434 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -35,12 +35,16 @@ use std::{ }; use time::PrimitiveDateTime; use tokio::sync::{Mutex, OwnedMutexGuard}; +use worktree_settings_file::LocalSettingsKind; #[cfg(test)] pub use tests::TestDb; pub use ids::*; pub use queries::billing_customers::{CreateBillingCustomerParams, UpdateBillingCustomerParams}; +pub use queries::billing_preferences::{ + CreateBillingPreferencesParams, UpdateBillingPreferencesParams, +}; pub use queries::billing_subscriptions::{ CreateBillingSubscriptionParams, UpdateBillingSubscriptionParams, }; @@ -766,6 +770,7 @@ pub struct Worktree { pub struct WorktreeSettingsFile { pub path: String, pub content: String, + pub kind: LocalSettingsKind, } pub struct NewExtensionVersion { @@ -783,3 +788,21 @@ pub struct ExtensionVersionConstraints { pub schema_versions: RangeInclusive, pub wasm_api_versions: RangeInclusive, } + +impl LocalSettingsKind { + pub fn from_proto(proto_kind: proto::LocalSettingsKind) -> Self { + match proto_kind { + proto::LocalSettingsKind::Settings => Self::Settings, + proto::LocalSettingsKind::Tasks => Self::Tasks, + proto::LocalSettingsKind::Editorconfig => Self::Editorconfig, + } + } + + pub fn to_proto(&self) -> proto::LocalSettingsKind { + match self { + Self::Settings => proto::LocalSettingsKind::Settings, + Self::Tasks => proto::LocalSettingsKind::Tasks, + Self::Editorconfig => proto::LocalSettingsKind::Editorconfig, + } + } +} diff --git a/crates/collab/src/db/ids.rs b/crates/collab/src/db/ids.rs index 82ff8a56e5dc2d..3a5bcff558db83 100644 --- a/crates/collab/src/db/ids.rs +++ b/crates/collab/src/db/ids.rs @@ -32,6 +32,7 @@ macro_rules! id_type { #[allow(unused)] #[allow(missing_docs)] pub fn from_proto(value: u64) -> Self { + debug_assert!(value != 0); Self(value as i32) } @@ -71,6 +72,7 @@ macro_rules! 
id_type { id_type!(AccessTokenId); id_type!(BillingCustomerId); id_type!(BillingSubscriptionId); +id_type!(BillingPreferencesId); id_type!(BufferId); id_type!(ChannelBufferCollaboratorId); id_type!(ChannelChatParticipantId); @@ -104,7 +106,7 @@ pub enum ChannelRole { /// Admin can read/write and change permissions. #[sea_orm(string_value = "admin")] Admin, - /// Member can read/write, but not change pemissions. + /// Member can read/write, but not change permissions. #[sea_orm(string_value = "member")] #[default] Member, diff --git a/crates/collab/src/db/queries.rs b/crates/collab/src/db/queries.rs index 459f66d89af48e..9c277790f9f084 100644 --- a/crates/collab/src/db/queries.rs +++ b/crates/collab/src/db/queries.rs @@ -2,6 +2,7 @@ use super::*; pub mod access_tokens; pub mod billing_customers; +pub mod billing_preferences; pub mod billing_subscriptions; pub mod buffers; pub mod channels; diff --git a/crates/collab/src/db/queries/billing_preferences.rs b/crates/collab/src/db/queries/billing_preferences.rs new file mode 100644 index 00000000000000..fa35ffc068126a --- /dev/null +++ b/crates/collab/src/db/queries/billing_preferences.rs @@ -0,0 +1,75 @@ +use super::*; + +#[derive(Debug)] +pub struct CreateBillingPreferencesParams { + pub max_monthly_llm_usage_spending_in_cents: i32, +} + +#[derive(Debug, Default)] +pub struct UpdateBillingPreferencesParams { + pub max_monthly_llm_usage_spending_in_cents: ActiveValue, +} + +impl Database { + /// Returns the billing preferences for the given user, if they exist. + pub async fn get_billing_preferences( + &self, + user_id: UserId, + ) -> Result> { + self.transaction(|tx| async move { + Ok(billing_preference::Entity::find() + .filter(billing_preference::Column::UserId.eq(user_id)) + .one(&*tx) + .await?) + }) + .await + } + + /// Creates new billing preferences for the given user. + pub async fn create_billing_preferences( + &self, + user_id: UserId, + params: &CreateBillingPreferencesParams, + ) -> Result { + self.transaction(|tx| async move { + let preferences = billing_preference::Entity::insert(billing_preference::ActiveModel { + user_id: ActiveValue::set(user_id), + max_monthly_llm_usage_spending_in_cents: ActiveValue::set( + params.max_monthly_llm_usage_spending_in_cents, + ), + ..Default::default() + }) + .exec_with_returning(&*tx) + .await?; + + Ok(preferences) + }) + .await + } + + /// Updates the billing preferences for the given user. + pub async fn update_billing_preferences( + &self, + user_id: UserId, + params: &UpdateBillingPreferencesParams, + ) -> Result { + self.transaction(|tx| async move { + let preferences = billing_preference::Entity::update_many() + .set(billing_preference::ActiveModel { + max_monthly_llm_usage_spending_in_cents: params + .max_monthly_llm_usage_spending_in_cents + .clone(), + ..Default::default() + }) + .filter(billing_preference::Column::UserId.eq(user_id)) + .exec_with_returning(&*tx) + .await?; + + Ok(preferences + .into_iter() + .next() + .ok_or_else(|| anyhow!("billing preferences not found"))?) 
+ }) + .await + } +} diff --git a/crates/collab/src/db/queries/billing_subscriptions.rs b/crates/collab/src/db/queries/billing_subscriptions.rs index 7a7ba31f166988..53a17f9c531abd 100644 --- a/crates/collab/src/db/queries/billing_subscriptions.rs +++ b/crates/collab/src/db/queries/billing_subscriptions.rs @@ -112,6 +112,37 @@ impl Database { .await } + pub async fn get_active_billing_subscriptions( + &self, + user_ids: HashSet, + ) -> Result> { + self.transaction(|tx| { + let user_ids = user_ids.clone(); + async move { + let mut rows = billing_subscription::Entity::find() + .inner_join(billing_customer::Entity) + .select_also(billing_customer::Entity) + .filter(billing_customer::Column::UserId.is_in(user_ids)) + .filter( + billing_subscription::Column::StripeSubscriptionStatus + .eq(StripeSubscriptionStatus::Active), + ) + .order_by_asc(billing_subscription::Column::Id) + .stream(&*tx) + .await?; + + let mut subscriptions = HashMap::default(); + while let Some(row) = rows.next().await { + if let (subscription, Some(customer)) = row? { + subscriptions.insert(customer.user_id, (customer, subscription)); + } + } + Ok(subscriptions) + } + }) + .await + } + /// Returns whether the user has an active billing subscription. pub async fn has_active_billing_subscription(&self, user_id: UserId) -> Result { Ok(self.count_active_billing_subscriptions(user_id).await? > 0) diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index 7b19dee315476d..06ad2b45946511 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ b/crates/collab/src/db/queries/buffers.rs @@ -689,9 +689,7 @@ impl Database { } let mut text_buffer = text::Buffer::new(0, text::BufferId::new(1).unwrap(), base_text); - text_buffer - .apply_ops(operations.into_iter().filter_map(operation_from_wire)) - .unwrap(); + text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire)); let base_text = text_buffer.text(); let epoch = buffer.epoch + 1; diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index a6956c84966ea3..f4eabf49791c93 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -1,3 +1,4 @@ +use anyhow::Context as _; use util::ResultExt; use super::*; @@ -30,6 +31,7 @@ impl Database { room_id: RoomId, connection: ConnectionId, worktrees: &[proto::WorktreeMetadata], + is_ssh_project: bool, dev_server_project_id: Option, ) -> Result> { self.room_transaction(room_id, |tx| async move { @@ -121,12 +123,14 @@ impl Database { .await?; } + let replica_id = if is_ssh_project { 1 } else { 0 }; + project_collaborator::ActiveModel { project_id: ActiveValue::set(project.id), connection_id: ActiveValue::set(connection.id as i32), connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)), user_id: ActiveValue::set(participant.user_id), - replica_id: ActiveValue::set(ReplicaId(0)), + replica_id: ActiveValue::set(ReplicaId(replica_id)), is_host: ActiveValue::set(true), ..Default::default() } @@ -282,7 +286,7 @@ impl Database { ) .one(&*tx) .await? - .ok_or_else(|| anyhow!("no such project"))?; + .ok_or_else(|| anyhow!("no such project: {project_id}"))?; // Update metadata. 
worktree::Entity::update(worktree::ActiveModel { @@ -524,6 +528,12 @@ impl Database { connection: ConnectionId, ) -> Result>> { let project_id = ProjectId::from_proto(update.project_id); + let kind = match update.kind { + Some(kind) => proto::LocalSettingsKind::from_i32(kind) + .with_context(|| format!("unknown worktree settings kind: {kind}"))?, + None => proto::LocalSettingsKind::Settings, + }; + let kind = LocalSettingsKind::from_proto(kind); self.project_transaction(project_id, |tx| async move { // Ensure the update comes from the host. let project = project::Entity::find_by_id(project_id) @@ -540,6 +550,7 @@ impl Database { worktree_id: ActiveValue::Set(update.worktree_id as i64), path: ActiveValue::Set(update.path.clone()), content: ActiveValue::Set(content.clone()), + kind: ActiveValue::Set(kind), }) .on_conflict( OnConflict::columns([ @@ -728,6 +739,11 @@ impl Database { is_ignored: db_entry.is_ignored, is_external: db_entry.is_external, git_status: db_entry.git_status.map(|status| status as i32), + // This is only used in the summarization backlog, so if it's None, + // that just means we won't be able to detect when to resummarize + // based on total number of backlogged bytes - instead, we'd go + // on number of files only. That shouldn't be a huge deal in practice. + size: None, is_fifo: db_entry.is_fifo, }); } @@ -792,6 +808,7 @@ impl Database { worktree.settings_files.push(WorktreeSettingsFile { path: db_settings_file.path, content: db_settings_file.content, + kind: db_settings_file.kind, }); } } @@ -821,6 +838,7 @@ impl Database { .map(|language_server| proto::LanguageServer { id: language_server.id as u64, name: language_server.name, + worktree_id: None, }) .collect(), dev_server_project_id: project.dev_server_project_id, diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index 1669ddbb3b7477..a0bb9fed698654 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -663,6 +663,11 @@ impl Database { is_ignored: db_entry.is_ignored, is_external: db_entry.is_external, git_status: db_entry.git_status.map(|status| status as i32), + // This is only used in the summarization backlog, so if it's None, + // that just means we won't be able to detect when to resummarize + // based on total number of backlogged bytes - instead, we'd go + // on number of files only. That shouldn't be a huge deal in practice. + size: None, is_fifo: db_entry.is_fifo, }); } @@ -713,6 +718,7 @@ impl Database { .map(|language_server| proto::LanguageServer { id: language_server.id as u64, name: language_server.name, + worktree_id: None, }) .collect::>(); @@ -730,6 +736,7 @@ impl Database { worktree.settings_files.push(WorktreeSettingsFile { path: db_settings_file.path, content: db_settings_file.content, + kind: db_settings_file.kind, }); } } diff --git a/crates/collab/src/db/queries/users.rs b/crates/collab/src/db/queries/users.rs index b755476e338b60..4443d751542b50 100644 --- a/crates/collab/src/db/queries/users.rs +++ b/crates/collab/src/db/queries/users.rs @@ -298,6 +298,12 @@ impl Database { result } + /// Returns all feature flags. + pub async fn list_feature_flags(&self) -> Result> { + self.transaction(|tx| async move { Ok(feature_flag::Entity::find().all(&*tx).await?) }) + .await + } + /// Creates a new feature flag. 
pub async fn create_user_flag(&self, flag: &str, enabled_for_all: bool) -> Result { self.transaction(|tx| async move { diff --git a/crates/collab/src/db/tables.rs b/crates/collab/src/db/tables.rs index 07d070b5694bab..01d3835dc1c79a 100644 --- a/crates/collab/src/db/tables.rs +++ b/crates/collab/src/db/tables.rs @@ -1,5 +1,6 @@ pub mod access_token; pub mod billing_customer; +pub mod billing_preference; pub mod billing_subscription; pub mod buffer; pub mod buffer_operation; diff --git a/crates/collab/src/db/tables/billing_preference.rs b/crates/collab/src/db/tables/billing_preference.rs new file mode 100644 index 00000000000000..0ad92c25d6abd6 --- /dev/null +++ b/crates/collab/src/db/tables/billing_preference.rs @@ -0,0 +1,30 @@ +use crate::db::{BillingPreferencesId, UserId}; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "billing_preferences")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: BillingPreferencesId, + pub created_at: DateTime, + pub user_id: UserId, + pub max_monthly_llm_usage_spending_in_cents: i32, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::user::Entity", + from = "Column::UserId", + to = "super::user::Column::Id" + )] + User, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::User.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/worktree_settings_file.rs b/crates/collab/src/db/tables/worktree_settings_file.rs index 92348c1ec94366..71f7b73fc1c399 100644 --- a/crates/collab/src/db/tables/worktree_settings_file.rs +++ b/crates/collab/src/db/tables/worktree_settings_file.rs @@ -11,9 +11,25 @@ pub struct Model { #[sea_orm(primary_key)] pub path: String, pub content: String, + pub kind: LocalSettingsKind, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation {} impl ActiveModelBehavior for ActiveModel {} + +#[derive( + Copy, Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Default, Hash, serde::Serialize, +)] +#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")] +#[serde(rename_all = "snake_case")] +pub enum LocalSettingsKind { + #[default] + #[sea_orm(string_value = "settings")] + Settings, + #[sea_orm(string_value = "tasks")] + Tasks, + #[sea_orm(string_value = "editorconfig")] + Editorconfig, +} diff --git a/crates/collab/src/db/tests/buffer_tests.rs b/crates/collab/src/db/tests/buffer_tests.rs index 55a8f216c49406..adc571580a0724 100644 --- a/crates/collab/src/db/tests/buffer_tests.rs +++ b/crates/collab/src/db/tests/buffer_tests.rs @@ -96,16 +96,14 @@ async fn test_channel_buffers(db: &Arc) { text::BufferId::new(1).unwrap(), buffer_response_b.base_text, ); - buffer_b - .apply_ops(buffer_response_b.operations.into_iter().map(|operation| { - let operation = proto::deserialize_operation(operation).unwrap(); - if let language::Operation::Buffer(operation) = operation { - operation - } else { - unreachable!() - } - })) - .unwrap(); + buffer_b.apply_ops(buffer_response_b.operations.into_iter().map(|operation| { + let operation = proto::deserialize_operation(operation).unwrap(); + if let language::Operation::Buffer(operation) = operation { + operation + } else { + unreachable!() + } + })); assert_eq!(buffer_b.text(), "hello, cruel world"); diff --git a/crates/collab/src/db/tests/db_tests.rs b/crates/collab/src/db/tests/db_tests.rs index 9a4ca3c11ab314..626335028770ec 100644 --- 
a/crates/collab/src/db/tests/db_tests.rs +++ b/crates/collab/src/db/tests/db_tests.rs @@ -540,18 +540,18 @@ async fn test_project_count(db: &Arc) { .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0); - db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], None) + db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, None) .await .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1); - db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], None) + db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, None) .await .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2); // Projects shared by admins aren't counted. - db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], None) + db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false, None) .await .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2); diff --git a/crates/collab/src/lib.rs b/crates/collab/src/lib.rs index 461adc3575badf..78f514adb734d9 100644 --- a/crates/collab/src/lib.rs +++ b/crates/collab/src/lib.rs @@ -1,5 +1,6 @@ pub mod api; pub mod auth; +mod cents; pub mod clickhouse; pub mod db; pub mod env; @@ -9,6 +10,7 @@ pub mod migrations; mod rate_limiter; pub mod rpc; pub mod seed; +pub mod stripe_billing; pub mod user_backfiller; #[cfg(test)] @@ -20,13 +22,17 @@ use axum::{ http::{HeaderMap, StatusCode}, response::IntoResponse, }; +pub use cents::*; use db::{ChannelId, Database}; use executor::Executor; +use llm::db::LlmDatabase; pub use rate_limiter::*; use serde::Deserialize; use std::{path::PathBuf, sync::Arc}; use util::ResultExt; +use crate::stripe_billing::StripeBilling; + pub type Result = std::result::Result; pub enum Error { @@ -170,13 +176,10 @@ pub struct Config { pub anthropic_api_key: Option>, pub anthropic_staff_api_key: Option>, pub llm_closed_beta_model_name: Option>, - pub qwen2_7b_api_key: Option>, - pub qwen2_7b_api_url: Option>, pub zed_client_checksum_seed: Option, pub slack_panics_webhook: Option, pub auto_join_channel_id: Option, pub stripe_api_key: Option, - pub stripe_price_id: Option>, pub supermaven_admin_api_key: Option>, pub user_backfiller_github_access_token: Option>, } @@ -233,10 +236,7 @@ impl Config { migrations_path: None, seed_path: None, stripe_api_key: None, - stripe_price_id: None, supermaven_admin_api_key: None, - qwen2_7b_api_key: None, - qwen2_7b_api_url: None, user_backfiller_github_access_token: None, } } @@ -268,9 +268,11 @@ impl ServiceMode { pub struct AppState { pub db: Arc, + pub llm_db: Option>, pub live_kit_client: Option>, pub blob_store_client: Option, pub stripe_client: Option>, + pub stripe_billing: Option>, pub rate_limiter: Arc, pub executor: Executor, pub clickhouse_client: Option<::clickhouse::Client>, @@ -284,6 +286,20 @@ impl AppState { let mut db = Database::new(db_options, Executor::Production).await?; db.initialize_notification_kinds().await?; + let llm_db = if let Some((llm_database_url, llm_database_max_connections)) = config + .llm_database_url + .clone() + .zip(config.llm_database_max_connections) + { + let mut llm_db_options = db::ConnectOptions::new(llm_database_url); + llm_db_options.max_connections(llm_database_max_connections); + let mut llm_db = LlmDatabase::new(llm_db_options, executor.clone()).await?; + llm_db.initialize().await?; + Some(Arc::new(llm_db)) + } else { + None + }; + let live_kit_client = if let Some(((server, key), secret)) = config .live_kit_server .as_ref() @@ 
-300,11 +316,16 @@ impl AppState { }; let db = Arc::new(db); + let stripe_client = build_stripe_client(&config).map(Arc::new).log_err(); let this = Self { db: db.clone(), + llm_db, live_kit_client, blob_store_client: build_blob_store_client(&config).await.log_err(), - stripe_client: build_stripe_client(&config).await.map(Arc::new).log_err(), + stripe_billing: stripe_client + .clone() + .map(|stripe_client| Arc::new(StripeBilling::new(stripe_client))), + stripe_client, rate_limiter: Arc::new(RateLimiter::new(db)), executor, clickhouse_client: config @@ -317,12 +338,11 @@ impl AppState { } } -async fn build_stripe_client(config: &Config) -> anyhow::Result { +fn build_stripe_client(config: &Config) -> anyhow::Result { let api_key = config .stripe_api_key .as_ref() .ok_or_else(|| anyhow!("missing stripe_api_key"))?; - Ok(stripe::Client::new(api_key)) } diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index e1a345436810ea..9ee31ab3d15cff 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -4,7 +4,7 @@ mod telemetry; mod token; use crate::{ - api::CloudflareIpCountryHeader, build_clickhouse_client, db::UserId, executor::Executor, + api::CloudflareIpCountryHeader, build_clickhouse_client, db::UserId, executor::Executor, Cents, Config, Error, Result, }; use anyhow::{anyhow, Context as _}; @@ -20,13 +20,14 @@ use axum::{ }; use chrono::{DateTime, Duration, Utc}; use collections::HashMap; +use db::TokenUsage; use db::{usage_measure::UsageMeasure, ActiveUserCount, LlmDatabase}; use futures::{Stream, StreamExt as _}; -use http_client::IsahcHttpClient; -use rpc::ListModelsResponse; +use reqwest_client::ReqwestClient; use rpc::{ proto::Plan, LanguageModelProvider, PerformCompletionParams, EXPIRED_LLM_TOKEN_HEADER_NAME, }; +use rpc::{ListModelsResponse, MAX_LLM_MONTHLY_SPEND_REACHED_HEADER_NAME}; use std::{ pin::Pin, sync::Arc, @@ -43,7 +44,7 @@ pub struct LlmState { pub config: Config, pub executor: Executor, pub db: Arc, - pub http_client: IsahcHttpClient, + pub http_client: ReqwestClient, pub clickhouse_client: Option, active_user_count_by_model: RwLock, ActiveUserCount)>>, @@ -69,10 +70,8 @@ impl LlmState { let db = Arc::new(db); let user_agent = format!("Zed Server/{}", env!("CARGO_PKG_VERSION")); - let http_client = IsahcHttpClient::builder() - .default_header("User-Agent", user_agent) - .build() - .context("failed to construct http client")?; + let http_client = + ReqwestClient::user_agent(&user_agent).context("failed to construct http client")?; let this = Self { executor, @@ -319,22 +318,31 @@ async fn perform_completion( chunks .map(move |event| { let chunk = event?; - let (input_tokens, output_tokens) = match &chunk { + let ( + input_tokens, + output_tokens, + cache_creation_input_tokens, + cache_read_input_tokens, + ) = match &chunk { anthropic::Event::MessageStart { message: anthropic::Response { usage, .. }, } | anthropic::Event::MessageDelta { usage, .. 
} => ( usage.input_tokens.unwrap_or(0) as usize, usage.output_tokens.unwrap_or(0) as usize, + usage.cache_creation_input_tokens.unwrap_or(0) as usize, + usage.cache_read_input_tokens.unwrap_or(0) as usize, ), - _ => (0, 0), + _ => (0, 0, 0, 0), }; - anyhow::Ok(( - serde_json::to_vec(&chunk).unwrap(), + anyhow::Ok(CompletionChunk { + bytes: serde_json::to_vec(&chunk).unwrap(), input_tokens, output_tokens, - )) + cache_creation_input_tokens, + cache_read_input_tokens, + }) }) .boxed() } @@ -360,11 +368,13 @@ async fn perform_completion( chunk.usage.as_ref().map_or(0, |u| u.prompt_tokens) as usize; let output_tokens = chunk.usage.as_ref().map_or(0, |u| u.completion_tokens) as usize; - ( - serde_json::to_vec(&chunk).unwrap(), + CompletionChunk { + bytes: serde_json::to_vec(&chunk).unwrap(), input_tokens, output_tokens, - ) + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + } }) }) .boxed() @@ -388,49 +398,13 @@ async fn perform_completion( .map(|event| { event.map(|chunk| { // TODO - implement token counting for Google AI - let input_tokens = 0; - let output_tokens = 0; - ( - serde_json::to_vec(&chunk).unwrap(), - input_tokens, - output_tokens, - ) - }) - }) - .boxed() - } - LanguageModelProvider::Zed => { - let api_key = state - .config - .qwen2_7b_api_key - .as_ref() - .context("no Qwen2-7B API key configured on the server")?; - let api_url = state - .config - .qwen2_7b_api_url - .as_ref() - .context("no Qwen2-7B URL configured on the server")?; - let chunks = open_ai::stream_completion( - &state.http_client, - api_url, - api_key, - serde_json::from_str(params.provider_request.get())?, - None, - ) - .await?; - - chunks - .map(|event| { - event.map(|chunk| { - let input_tokens = - chunk.usage.as_ref().map_or(0, |u| u.prompt_tokens) as usize; - let output_tokens = - chunk.usage.as_ref().map_or(0, |u| u.completion_tokens) as usize; - ( - serde_json::to_vec(&chunk).unwrap(), - input_tokens, - output_tokens, - ) + CompletionChunk { + bytes: serde_json::to_vec(&chunk).unwrap(), + input_tokens: 0, + output_tokens: 0, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + } }) }) .boxed() @@ -442,8 +416,7 @@ async fn perform_completion( claims, provider: params.provider, model, - input_tokens: 0, - output_tokens: 0, + tokens: TokenUsage::default(), inner_stream: stream, }))) } @@ -460,10 +433,15 @@ fn normalize_model_name(known_models: Vec, name: String) -> String { } } -/// The maximum lifetime spending an individual user can reach before being cut off. +/// The maximum monthly spending an individual user can reach on the free tier +/// before they have to pay. +pub const FREE_TIER_MONTHLY_SPENDING_LIMIT: Cents = Cents::from_dollars(10); + +/// The default value to use for maximum spend per month if the user did not +/// explicitly set a maximum spend. /// -/// Represented in cents. -const LIFETIME_SPENDING_LIMIT_IN_CENTS: usize = 1_000 * 100; +/// Used to prevent surprise bills. 
+pub const DEFAULT_MAX_MONTHLY_SPEND: Cents = Cents::from_dollars(10); async fn check_usage_limit( state: &Arc, @@ -482,11 +460,28 @@ async fn check_usage_limit( ) .await?; - if usage.lifetime_spending >= LIFETIME_SPENDING_LIMIT_IN_CENTS { - return Err(Error::http( - StatusCode::FORBIDDEN, - "Maximum spending limit reached.".to_string(), - )); + if usage.spending_this_month >= FREE_TIER_MONTHLY_SPENDING_LIMIT { + if !claims.has_llm_subscription { + return Err(Error::http( + StatusCode::PAYMENT_REQUIRED, + "Maximum spending limit reached for this month.".to_string(), + )); + } + + if (usage.spending_this_month - FREE_TIER_MONTHLY_SPENDING_LIMIT) + >= Cents(claims.max_monthly_spend_in_cents) + { + return Err(Error::Http( + StatusCode::FORBIDDEN, + "Maximum spending limit reached for this month.".to_string(), + [( + HeaderName::from_static(MAX_LLM_MONTHLY_SPEND_REACHED_HEADER_NAME), + HeaderValue::from_static("true"), + )] + .into_iter() + .collect(), + )); + } } let active_users = state.get_active_user_count(provider, model_name).await?; @@ -529,7 +524,6 @@ async fn check_usage_limit( UsageMeasure::RequestsPerMinute => "requests_per_minute", UsageMeasure::TokensPerMinute => "tokens_per_minute", UsageMeasure::TokensPerDay => "tokens_per_day", - _ => "", }; if let Some(client) = state.clickhouse_client.as_ref() { @@ -588,29 +582,38 @@ async fn check_usage_limit( Ok(()) } +struct CompletionChunk { + bytes: Vec, + input_tokens: usize, + output_tokens: usize, + cache_creation_input_tokens: usize, + cache_read_input_tokens: usize, +} + struct TokenCountingStream { state: Arc, claims: LlmTokenClaims, provider: LanguageModelProvider, model: String, - input_tokens: usize, - output_tokens: usize, + tokens: TokenUsage, inner_stream: S, } impl Stream for TokenCountingStream where - S: Stream, usize, usize), anyhow::Error>> + Unpin, + S: Stream> + Unpin, { type Item = Result, anyhow::Error>; fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { match Pin::new(&mut self.inner_stream).poll_next(cx) { - Poll::Ready(Some(Ok((mut bytes, input_tokens, output_tokens)))) => { - bytes.push(b'\n'); - self.input_tokens += input_tokens; - self.output_tokens += output_tokens; - Poll::Ready(Some(Ok(bytes))) + Poll::Ready(Some(Ok(mut chunk))) => { + chunk.bytes.push(b'\n'); + self.tokens.input += chunk.input_tokens; + self.tokens.output += chunk.output_tokens; + self.tokens.input_cache_creation += chunk.cache_creation_input_tokens; + self.tokens.input_cache_read += chunk.cache_read_input_tokens; + Poll::Ready(Some(Ok(chunk.bytes))) } Poll::Ready(Some(Err(e))) => Poll::Ready(Some(Err(e))), Poll::Ready(None) => Poll::Ready(None), @@ -625,8 +628,7 @@ impl Drop for TokenCountingStream { let claims = self.claims.clone(); let provider = self.provider; let model = std::mem::take(&mut self.model); - let input_token_count = self.input_tokens; - let output_token_count = self.output_tokens; + let tokens = self.tokens; self.state.executor.spawn_detached(async move { let usage = state .db @@ -635,8 +637,9 @@ impl Drop for TokenCountingStream { claims.is_staff, provider, &model, - input_token_count, - output_token_count, + tokens, + claims.has_llm_subscription, + Cents(claims.max_monthly_spend_in_cents), Utc::now(), ) .await @@ -666,15 +669,25 @@ impl Drop for TokenCountingStream { }, model, provider: provider.to_string(), - input_token_count: input_token_count as u64, - output_token_count: output_token_count as u64, + input_token_count: tokens.input as u64, + cache_creation_input_token_count: 
tokens.input_cache_creation as u64, + cache_read_input_token_count: tokens.input_cache_read as u64, + output_token_count: tokens.output as u64, requests_this_minute: usage.requests_this_minute as u64, tokens_this_minute: usage.tokens_this_minute as u64, tokens_this_day: usage.tokens_this_day as u64, - input_tokens_this_month: usage.input_tokens_this_month as u64, - output_tokens_this_month: usage.output_tokens_this_month as u64, - spending_this_month: usage.spending_this_month as u64, - lifetime_spending: usage.lifetime_spending as u64, + input_tokens_this_month: usage.tokens_this_month.input as u64, + cache_creation_input_tokens_this_month: usage + .tokens_this_month + .input_cache_creation + as u64, + cache_read_input_tokens_this_month: usage + .tokens_this_month + .input_cache_read + as u64, + output_tokens_this_month: usage.tokens_this_month.output as u64, + spending_this_month: usage.spending_this_month.0 as u64, + lifetime_spending: usage.lifetime_spending.0 as u64, }, ) .await diff --git a/crates/collab/src/llm/authorization.rs b/crates/collab/src/llm/authorization.rs index cc345579eca229..9f82af51c39b73 100644 --- a/crates/collab/src/llm/authorization.rs +++ b/crates/collab/src/llm/authorization.rs @@ -77,7 +77,6 @@ fn authorize_access_for_country( LanguageModelProvider::Anthropic => anthropic::is_supported_country(country_code), LanguageModelProvider::OpenAi => open_ai::is_supported_country(country_code), LanguageModelProvider::Google => google_ai::is_supported_country(country_code), - LanguageModelProvider::Zed => true, }; if !is_country_supported_by_provider { Err(Error::http( @@ -213,7 +212,6 @@ mod tests { (LanguageModelProvider::Anthropic, "T1"), // Tor (LanguageModelProvider::OpenAi, "T1"), // Tor (LanguageModelProvider::Google, "T1"), // Tor - (LanguageModelProvider::Zed, "T1"), // Tor ]; for (provider, country_code) in cases { diff --git a/crates/collab/src/llm/db.rs b/crates/collab/src/llm/db.rs index d46f51bb0df594..4374214c1b52fe 100644 --- a/crates/collab/src/llm/db.rs +++ b/crates/collab/src/llm/db.rs @@ -20,7 +20,7 @@ use std::future::Future; use std::sync::Arc; use anyhow::anyhow; -pub use queries::usages::ActiveUserCount; +pub use queries::usages::{ActiveUserCount, TokenUsage}; use sea_orm::prelude::*; pub use sea_orm::ConnectOptions; use sea_orm::{ @@ -97,6 +97,14 @@ impl LlmDatabase { .ok_or_else(|| anyhow!("unknown model {provider:?}:{name}"))?) } + pub fn model_by_id(&self, id: ModelId) -> Result<&model::Model> { + Ok(self + .models + .values() + .find(|model| model.id == id) + .ok_or_else(|| anyhow!("no model for ID {id:?}"))?) 
+ } + pub fn options(&self) -> &ConnectOptions { &self.options } diff --git a/crates/collab/src/llm/db/ids.rs b/crates/collab/src/llm/db/ids.rs index 8cc8a0f9746ef7..67f2a49380796c 100644 --- a/crates/collab/src/llm/db/ids.rs +++ b/crates/collab/src/llm/db/ids.rs @@ -3,8 +3,9 @@ use serde::{Deserialize, Serialize}; use crate::id_type; +id_type!(BillingEventId); id_type!(ModelId); id_type!(ProviderId); +id_type!(RevokedAccessTokenId); id_type!(UsageId); id_type!(UsageMeasureId); -id_type!(RevokedAccessTokenId); diff --git a/crates/collab/src/llm/db/queries.rs b/crates/collab/src/llm/db/queries.rs index 907d0589f34334..79a17999b79476 100644 --- a/crates/collab/src/llm/db/queries.rs +++ b/crates/collab/src/llm/db/queries.rs @@ -1,5 +1,6 @@ use super::*; +pub mod billing_events; pub mod providers; pub mod revoked_access_tokens; pub mod usages; diff --git a/crates/collab/src/llm/db/queries/billing_events.rs b/crates/collab/src/llm/db/queries/billing_events.rs new file mode 100644 index 00000000000000..400477f2340e86 --- /dev/null +++ b/crates/collab/src/llm/db/queries/billing_events.rs @@ -0,0 +1,31 @@ +use super::*; +use crate::Result; +use anyhow::Context as _; + +impl LlmDatabase { + pub async fn get_billing_events(&self) -> Result> { + self.transaction(|tx| async move { + let events_with_models = billing_event::Entity::find() + .find_also_related(model::Entity) + .all(&*tx) + .await?; + events_with_models + .into_iter() + .map(|(event, model)| { + let model = + model.context("could not find model associated with billing event")?; + Ok((event, model)) + }) + .collect() + }) + .await + } + + pub async fn consume_billing_event(&self, id: BillingEventId) -> Result<()> { + self.transaction(|tx| async move { + billing_event::Entity::delete_by_id(id).exec(&*tx).await?; + Ok(()) + }) + .await + } +} diff --git a/crates/collab/src/llm/db/queries/providers.rs b/crates/collab/src/llm/db/queries/providers.rs index 8a73b399c67ade..7e51061ceef2ee 100644 --- a/crates/collab/src/llm/db/queries/providers.rs +++ b/crates/collab/src/llm/db/queries/providers.rs @@ -1,5 +1,5 @@ use super::*; -use sea_orm::QueryOrder; +use sea_orm::{sea_query::OnConflict, QueryOrder}; use std::str::FromStr; use strum::IntoEnumIterator as _; @@ -99,6 +99,17 @@ impl LlmDatabase { ..Default::default() } })) + .on_conflict( + OnConflict::columns([model::Column::ProviderId, model::Column::Name]) + .update_columns([ + model::Column::MaxRequestsPerMinute, + model::Column::MaxTokensPerMinute, + model::Column::MaxTokensPerDay, + model::Column::PricePerMillionInputTokens, + model::Column::PricePerMillionOutputTokens, + ]) + .to_owned(), + ) .exec_without_returning(&*tx) .await?; Ok(()) diff --git a/crates/collab/src/llm/db/queries/usages.rs b/crates/collab/src/llm/db/queries/usages.rs index 65a0bd67345bd0..5883bcef571321 100644 --- a/crates/collab/src/llm/db/queries/usages.rs +++ b/crates/collab/src/llm/db/queries/usages.rs @@ -1,5 +1,6 @@ -use crate::db::UserId; -use chrono::Duration; +use crate::llm::Cents; +use crate::{db::UserId, llm::FREE_TIER_MONTHLY_SPENDING_LIMIT}; +use chrono::{Datelike, Duration}; use futures::StreamExt as _; use rpc::LanguageModelProvider; use sea_orm::QuerySelect; @@ -8,15 +9,28 @@ use strum::IntoEnumIterator as _; use super::*; +#[derive(Debug, PartialEq, Clone, Copy, Default)] +pub struct TokenUsage { + pub input: usize, + pub input_cache_creation: usize, + pub input_cache_read: usize, + pub output: usize, +} + +impl TokenUsage { + pub fn total(&self) -> usize { + self.input + self.input_cache_creation + 
self.input_cache_read + self.output + } +} + #[derive(Debug, PartialEq, Clone, Copy)] pub struct Usage { pub requests_this_minute: usize, pub tokens_this_minute: usize, pub tokens_this_day: usize, - pub input_tokens_this_month: usize, - pub output_tokens_this_month: usize, - pub spending_this_month: usize, - pub lifetime_spending: usize, + pub tokens_this_month: TokenUsage, + pub spending_this_month: Cents, + pub lifetime_spending: Cents, } #[derive(Debug, PartialEq, Clone)] @@ -138,6 +152,46 @@ impl LlmDatabase { .await } + pub async fn get_user_spending_for_month( + &self, + user_id: UserId, + now: DateTimeUtc, + ) -> Result { + self.transaction(|tx| async move { + let month = now.date_naive().month() as i32; + let year = now.date_naive().year(); + + let mut monthly_usages = monthly_usage::Entity::find() + .filter( + monthly_usage::Column::UserId + .eq(user_id) + .and(monthly_usage::Column::Month.eq(month)) + .and(monthly_usage::Column::Year.eq(year)), + ) + .stream(&*tx) + .await?; + let mut monthly_spending = Cents::ZERO; + + while let Some(usage) = monthly_usages.next().await { + let usage = usage?; + let Ok(model) = self.model_by_id(usage.model_id) else { + continue; + }; + + monthly_spending += calculate_spending( + model, + usage.input_tokens as usize, + usage.cache_creation_input_tokens as usize, + usage.cache_read_input_tokens as usize, + usage.output_tokens as usize, + ); + } + + Ok(monthly_spending) + }) + .await + } + pub async fn get_usage( &self, user_id: UserId, @@ -160,17 +214,26 @@ impl LlmDatabase { .all(&*tx) .await?; - let (lifetime_input_tokens, lifetime_output_tokens) = lifetime_usage::Entity::find() + let month = now.date_naive().month() as i32; + let year = now.date_naive().year(); + let monthly_usage = monthly_usage::Entity::find() + .filter( + monthly_usage::Column::UserId + .eq(user_id) + .and(monthly_usage::Column::ModelId.eq(model.id)) + .and(monthly_usage::Column::Month.eq(month)) + .and(monthly_usage::Column::Year.eq(year)), + ) + .one(&*tx) + .await?; + let lifetime_usage = lifetime_usage::Entity::find() .filter( lifetime_usage::Column::UserId .eq(user_id) .and(lifetime_usage::Column::ModelId.eq(model.id)), ) .one(&*tx) - .await? 
- .map_or((0, 0), |usage| { - (usage.input_tokens as usize, usage.output_tokens as usize) - }); + .await?; let requests_this_minute = self.get_usage_for_measure(&usages, now, UsageMeasure::RequestsPerMinute)?; @@ -178,21 +241,47 @@ impl LlmDatabase { self.get_usage_for_measure(&usages, now, UsageMeasure::TokensPerMinute)?; let tokens_this_day = self.get_usage_for_measure(&usages, now, UsageMeasure::TokensPerDay)?; - let input_tokens_this_month = - self.get_usage_for_measure(&usages, now, UsageMeasure::InputTokensPerMonth)?; - let output_tokens_this_month = - self.get_usage_for_measure(&usages, now, UsageMeasure::OutputTokensPerMonth)?; - let spending_this_month = - calculate_spending(model, input_tokens_this_month, output_tokens_this_month); - let lifetime_spending = - calculate_spending(model, lifetime_input_tokens, lifetime_output_tokens); + let spending_this_month = if let Some(monthly_usage) = &monthly_usage { + calculate_spending( + model, + monthly_usage.input_tokens as usize, + monthly_usage.cache_creation_input_tokens as usize, + monthly_usage.cache_read_input_tokens as usize, + monthly_usage.output_tokens as usize, + ) + } else { + Cents::ZERO + }; + let lifetime_spending = if let Some(lifetime_usage) = &lifetime_usage { + calculate_spending( + model, + lifetime_usage.input_tokens as usize, + lifetime_usage.cache_creation_input_tokens as usize, + lifetime_usage.cache_read_input_tokens as usize, + lifetime_usage.output_tokens as usize, + ) + } else { + Cents::ZERO + }; Ok(Usage { requests_this_minute, tokens_this_minute, tokens_this_day, - input_tokens_this_month, - output_tokens_this_month, + tokens_this_month: TokenUsage { + input: monthly_usage + .as_ref() + .map_or(0, |usage| usage.input_tokens as usize), + input_cache_creation: monthly_usage + .as_ref() + .map_or(0, |usage| usage.cache_creation_input_tokens as usize), + input_cache_read: monthly_usage + .as_ref() + .map_or(0, |usage| usage.cache_read_input_tokens as usize), + output: monthly_usage + .as_ref() + .map_or(0, |usage| usage.output_tokens as usize), + }, spending_this_month, lifetime_spending, }) @@ -207,8 +296,9 @@ impl LlmDatabase { is_staff: bool, provider: LanguageModelProvider, model_name: &str, - input_token_count: usize, - output_token_count: usize, + tokens: TokenUsage, + has_llm_subscription: bool, + max_monthly_spend: Cents, now: DateTimeUtc, ) -> Result { self.transaction(|tx| async move { @@ -243,7 +333,7 @@ impl LlmDatabase { &usages, UsageMeasure::TokensPerMinute, now, - input_token_count + output_token_count, + tokens.total(), &tx, ) .await?; @@ -255,36 +345,90 @@ impl LlmDatabase { &usages, UsageMeasure::TokensPerDay, now, - input_token_count + output_token_count, + tokens.total(), &tx, ) .await?; - let input_tokens_this_month = self - .update_usage_for_measure( - user_id, - is_staff, - model.id, - &usages, - UsageMeasure::InputTokensPerMonth, - now, - input_token_count, - &tx, + + let month = now.date_naive().month() as i32; + let year = now.date_naive().year(); + + // Update monthly usage + let monthly_usage = monthly_usage::Entity::find() + .filter( + monthly_usage::Column::UserId + .eq(user_id) + .and(monthly_usage::Column::ModelId.eq(model.id)) + .and(monthly_usage::Column::Month.eq(month)) + .and(monthly_usage::Column::Year.eq(year)), ) + .one(&*tx) .await?; - let output_tokens_this_month = self - .update_usage_for_measure( - user_id, - is_staff, - model.id, - &usages, - UsageMeasure::OutputTokensPerMonth, - now, - output_token_count, - &tx, - ) + + let monthly_usage = match monthly_usage { + 
Some(usage) => { + monthly_usage::Entity::update(monthly_usage::ActiveModel { + id: ActiveValue::unchanged(usage.id), + input_tokens: ActiveValue::set(usage.input_tokens + tokens.input as i64), + cache_creation_input_tokens: ActiveValue::set( + usage.cache_creation_input_tokens + tokens.input_cache_creation as i64, + ), + cache_read_input_tokens: ActiveValue::set( + usage.cache_read_input_tokens + tokens.input_cache_read as i64, + ), + output_tokens: ActiveValue::set(usage.output_tokens + tokens.output as i64), + ..Default::default() + }) + .exec(&*tx) + .await? + } + None => { + monthly_usage::ActiveModel { + user_id: ActiveValue::set(user_id), + model_id: ActiveValue::set(model.id), + month: ActiveValue::set(month), + year: ActiveValue::set(year), + input_tokens: ActiveValue::set(tokens.input as i64), + cache_creation_input_tokens: ActiveValue::set( + tokens.input_cache_creation as i64, + ), + cache_read_input_tokens: ActiveValue::set(tokens.input_cache_read as i64), + output_tokens: ActiveValue::set(tokens.output as i64), + ..Default::default() + } + .insert(&*tx) + .await? + } + }; + + let spending_this_month = calculate_spending( + model, + monthly_usage.input_tokens as usize, + monthly_usage.cache_creation_input_tokens as usize, + monthly_usage.cache_read_input_tokens as usize, + monthly_usage.output_tokens as usize, + ); + + if !is_staff + && spending_this_month > FREE_TIER_MONTHLY_SPENDING_LIMIT + && has_llm_subscription + && (spending_this_month - FREE_TIER_MONTHLY_SPENDING_LIMIT) <= max_monthly_spend + { + billing_event::ActiveModel { + id: ActiveValue::not_set(), + idempotency_key: ActiveValue::not_set(), + user_id: ActiveValue::set(user_id), + model_id: ActiveValue::set(model.id), + input_tokens: ActiveValue::set(tokens.input as i64), + input_cache_creation_tokens: ActiveValue::set( + tokens.input_cache_creation as i64, + ), + input_cache_read_tokens: ActiveValue::set(tokens.input_cache_read as i64), + output_tokens: ActiveValue::set(tokens.output as i64), + } + .insert(&*tx) .await?; - let spending_this_month = - calculate_spending(model, input_tokens_this_month, output_tokens_this_month); + } // Update lifetime usage let lifetime_usage = lifetime_usage::Entity::find() @@ -300,12 +444,14 @@ impl LlmDatabase { Some(usage) => { lifetime_usage::Entity::update(lifetime_usage::ActiveModel { id: ActiveValue::unchanged(usage.id), - input_tokens: ActiveValue::set( - usage.input_tokens + input_token_count as i64, + input_tokens: ActiveValue::set(usage.input_tokens + tokens.input as i64), + cache_creation_input_tokens: ActiveValue::set( + usage.cache_creation_input_tokens + tokens.input_cache_creation as i64, ), - output_tokens: ActiveValue::set( - usage.output_tokens + output_token_count as i64, + cache_read_input_tokens: ActiveValue::set( + usage.cache_read_input_tokens + tokens.input_cache_read as i64, ), + output_tokens: ActiveValue::set(usage.output_tokens + tokens.output as i64), ..Default::default() }) .exec(&*tx) @@ -315,8 +461,12 @@ impl LlmDatabase { lifetime_usage::ActiveModel { user_id: ActiveValue::set(user_id), model_id: ActiveValue::set(model.id), - input_tokens: ActiveValue::set(input_token_count as i64), - output_tokens: ActiveValue::set(output_token_count as i64), + input_tokens: ActiveValue::set(tokens.input as i64), + cache_creation_input_tokens: ActiveValue::set( + tokens.input_cache_creation as i64, + ), + cache_read_input_tokens: ActiveValue::set(tokens.input_cache_read as i64), + output_tokens: ActiveValue::set(tokens.output as i64), ..Default::default() } 
.insert(&*tx) @@ -327,6 +477,8 @@ impl LlmDatabase { let lifetime_spending = calculate_spending( model, lifetime_usage.input_tokens as usize, + lifetime_usage.cache_creation_input_tokens as usize, + lifetime_usage.cache_read_input_tokens as usize, lifetime_usage.output_tokens as usize, ); @@ -334,8 +486,12 @@ impl LlmDatabase { requests_this_minute, tokens_this_minute, tokens_this_day, - input_tokens_this_month, - output_tokens_this_month, + tokens_this_month: TokenUsage { + input: monthly_usage.input_tokens as usize, + input_cache_creation: monthly_usage.cache_creation_input_tokens as usize, + input_cache_read: monthly_usage.cache_read_input_tokens as usize, + output: monthly_usage.output_tokens as usize, + }, spending_this_month, lifetime_spending, }) @@ -501,18 +657,29 @@ impl LlmDatabase { fn calculate_spending( model: &model::Model, input_tokens_this_month: usize, + cache_creation_input_tokens_this_month: usize, + cache_read_input_tokens_this_month: usize, output_tokens_this_month: usize, -) -> usize { +) -> Cents { let input_token_cost = input_tokens_this_month * model.price_per_million_input_tokens as usize / 1_000_000; + let cache_creation_input_token_cost = cache_creation_input_tokens_this_month + * model.price_per_million_cache_creation_input_tokens as usize + / 1_000_000; + let cache_read_input_token_cost = cache_read_input_tokens_this_month + * model.price_per_million_cache_read_input_tokens as usize + / 1_000_000; let output_token_cost = output_tokens_this_month * model.price_per_million_output_tokens as usize / 1_000_000; - input_token_cost + output_token_cost + let spending = input_token_cost + + cache_creation_input_token_cost + + cache_read_input_token_cost + + output_token_cost; + Cents::new(spending as u32) } const MINUTE_BUCKET_COUNT: usize = 12; const DAY_BUCKET_COUNT: usize = 48; -const MONTH_BUCKET_COUNT: usize = 30; impl UsageMeasure { fn bucket_count(&self) -> usize { @@ -520,8 +687,6 @@ impl UsageMeasure { UsageMeasure::RequestsPerMinute => MINUTE_BUCKET_COUNT, UsageMeasure::TokensPerMinute => MINUTE_BUCKET_COUNT, UsageMeasure::TokensPerDay => DAY_BUCKET_COUNT, - UsageMeasure::InputTokensPerMonth => MONTH_BUCKET_COUNT, - UsageMeasure::OutputTokensPerMonth => MONTH_BUCKET_COUNT, } } @@ -530,8 +695,6 @@ impl UsageMeasure { UsageMeasure::RequestsPerMinute => Duration::minutes(1), UsageMeasure::TokensPerMinute => Duration::minutes(1), UsageMeasure::TokensPerDay => Duration::hours(24), - UsageMeasure::InputTokensPerMonth => Duration::days(30), - UsageMeasure::OutputTokensPerMonth => Duration::days(30), } } diff --git a/crates/collab/src/llm/db/tables.rs b/crates/collab/src/llm/db/tables.rs index 4beefe2b5d45d7..407c5c8fd040db 100644 --- a/crates/collab/src/llm/db/tables.rs +++ b/crates/collab/src/llm/db/tables.rs @@ -1,5 +1,7 @@ +pub mod billing_event; pub mod lifetime_usage; pub mod model; +pub mod monthly_usage; pub mod provider; pub mod revoked_access_token; pub mod usage; diff --git a/crates/collab/src/llm/db/tables/billing_event.rs b/crates/collab/src/llm/db/tables/billing_event.rs new file mode 100644 index 00000000000000..93987bc71e5aaa --- /dev/null +++ b/crates/collab/src/llm/db/tables/billing_event.rs @@ -0,0 +1,37 @@ +use crate::{ + db::UserId, + llm::db::{BillingEventId, ModelId}, +}; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel)] +#[sea_orm(table_name = "billing_events")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: BillingEventId, + pub idempotency_key: Uuid, + pub user_id: UserId, + pub model_id: 
ModelId, + pub input_tokens: i64, + pub input_cache_creation_tokens: i64, + pub input_cache_read_tokens: i64, + pub output_tokens: i64, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::model::Entity", + from = "Column::ModelId", + to = "super::model::Column::Id" + )] + Model, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Model.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/llm/db/tables/lifetime_usage.rs b/crates/collab/src/llm/db/tables/lifetime_usage.rs index 05ad2d5e94c1fa..fc8354699b2309 100644 --- a/crates/collab/src/llm/db/tables/lifetime_usage.rs +++ b/crates/collab/src/llm/db/tables/lifetime_usage.rs @@ -9,6 +9,8 @@ pub struct Model { pub user_id: UserId, pub model_id: ModelId, pub input_tokens: i64, + pub cache_creation_input_tokens: i64, + pub cache_read_input_tokens: i64, pub output_tokens: i64, } diff --git a/crates/collab/src/llm/db/tables/model.rs b/crates/collab/src/llm/db/tables/model.rs index c87789f27e2fc6..6c521841854908 100644 --- a/crates/collab/src/llm/db/tables/model.rs +++ b/crates/collab/src/llm/db/tables/model.rs @@ -14,6 +14,8 @@ pub struct Model { pub max_tokens_per_minute: i64, pub max_tokens_per_day: i64, pub price_per_million_input_tokens: i32, + pub price_per_million_cache_creation_input_tokens: i32, + pub price_per_million_cache_read_input_tokens: i32, pub price_per_million_output_tokens: i32, } @@ -27,6 +29,8 @@ pub enum Relation { Provider, #[sea_orm(has_many = "super::usage::Entity")] Usages, + #[sea_orm(has_many = "super::billing_event::Entity")] + BillingEvents, } impl Related for Entity { @@ -41,4 +45,10 @@ impl Related for Entity { } } +impl Related for Entity { + fn to() -> RelationDef { + Relation::BillingEvents.def() + } +} + impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/llm/db/tables/monthly_usage.rs b/crates/collab/src/llm/db/tables/monthly_usage.rs new file mode 100644 index 00000000000000..1e849f6aefc585 --- /dev/null +++ b/crates/collab/src/llm/db/tables/monthly_usage.rs @@ -0,0 +1,22 @@ +use crate::{db::UserId, llm::db::ModelId}; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel)] +#[sea_orm(table_name = "monthly_usages")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i32, + pub user_id: UserId, + pub model_id: ModelId, + pub month: i32, + pub year: i32, + pub input_tokens: i64, + pub cache_creation_input_tokens: i64, + pub cache_read_input_tokens: i64, + pub output_tokens: i64, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/llm/db/tables/usage_measure.rs b/crates/collab/src/llm/db/tables/usage_measure.rs index 1105d997c2bcea..b0e5b866447ed0 100644 --- a/crates/collab/src/llm/db/tables/usage_measure.rs +++ b/crates/collab/src/llm/db/tables/usage_measure.rs @@ -9,8 +9,6 @@ pub enum UsageMeasure { RequestsPerMinute, TokensPerMinute, TokensPerDay, - InputTokensPerMonth, - OutputTokensPerMonth, } #[derive(Clone, Debug, PartialEq, DeriveEntityModel)] diff --git a/crates/collab/src/llm/db/tests.rs b/crates/collab/src/llm/db/tests.rs index 5fba2a24fd157d..963e5bcb0a30a7 100644 --- a/crates/collab/src/llm/db/tests.rs +++ b/crates/collab/src/llm/db/tests.rs @@ -1,3 +1,4 @@ +mod billing_tests; mod provider_tests; mod usage_tests; diff --git a/crates/collab/src/llm/db/tests/billing_tests.rs 
b/crates/collab/src/llm/db/tests/billing_tests.rs new file mode 100644 index 00000000000000..88551dd5f85e9d --- /dev/null +++ b/crates/collab/src/llm/db/tests/billing_tests.rs @@ -0,0 +1,148 @@ +use crate::{ + db::UserId, + llm::{ + db::{queries::providers::ModelParams, LlmDatabase, TokenUsage}, + FREE_TIER_MONTHLY_SPENDING_LIMIT, + }, + test_llm_db, Cents, +}; +use chrono::{DateTime, Utc}; +use pretty_assertions::assert_eq; +use rpc::LanguageModelProvider; + +test_llm_db!( + test_billing_limit_exceeded, + test_billing_limit_exceeded_postgres +); + +async fn test_billing_limit_exceeded(db: &mut LlmDatabase) { + let provider = LanguageModelProvider::Anthropic; + let model = "fake-claude-limerick"; + const PRICE_PER_MILLION_INPUT_TOKENS: i32 = 5; + const PRICE_PER_MILLION_OUTPUT_TOKENS: i32 = 5; + + // Initialize the database and insert the model + db.initialize().await.unwrap(); + db.insert_models(&[ModelParams { + provider, + name: model.to_string(), + max_requests_per_minute: 5, + max_tokens_per_minute: 10_000, + max_tokens_per_day: 50_000, + price_per_million_input_tokens: PRICE_PER_MILLION_INPUT_TOKENS, + price_per_million_output_tokens: PRICE_PER_MILLION_OUTPUT_TOKENS, + }]) + .await + .unwrap(); + + // Set a fixed datetime for consistent testing + let now = DateTime::parse_from_rfc3339("2024-08-08T22:46:33Z") + .unwrap() + .with_timezone(&Utc); + + let user_id = UserId::from_proto(123); + + let max_monthly_spend = Cents::from_dollars(11); + + // Record usage that brings us close to the limit but doesn't exceed it + // Let's say we use $10.50 worth of tokens + let tokens_to_use = 210_000_000; // This will cost $10.50 at $0.05 per 1 million tokens + let usage = TokenUsage { + input: tokens_to_use, + input_cache_creation: 0, + input_cache_read: 0, + output: 0, + }; + + // Verify that before we record any usage, there are 0 billing events + let billing_events = db.get_billing_events().await.unwrap(); + assert_eq!(billing_events.len(), 0); + + db.record_usage( + user_id, + false, + provider, + model, + usage, + true, + max_monthly_spend, + now, + ) + .await + .unwrap(); + + // Verify the recorded usage and spending + let recorded_usage = db.get_usage(user_id, provider, model, now).await.unwrap(); + // Verify that we exceeded the free tier usage + assert_eq!(recorded_usage.spending_this_month, Cents::new(1050)); + assert!(recorded_usage.spending_this_month > FREE_TIER_MONTHLY_SPENDING_LIMIT); + + // Verify that there is one `billing_event` record + let billing_events = db.get_billing_events().await.unwrap(); + assert_eq!(billing_events.len(), 1); + + let (billing_event, _model) = &billing_events[0]; + assert_eq!(billing_event.user_id, user_id); + assert_eq!(billing_event.input_tokens, tokens_to_use as i64); + assert_eq!(billing_event.input_cache_creation_tokens, 0); + assert_eq!(billing_event.input_cache_read_tokens, 0); + assert_eq!(billing_event.output_tokens, 0); + + // Record usage that puts us at $20.50 + let usage_2 = TokenUsage { + input: 200_000_000, // This will cost $10 more, pushing us from $10.50 to $20.50, + input_cache_creation: 0, + input_cache_read: 0, + output: 0, + }; + db.record_usage( + user_id, + false, + provider, + model, + usage_2, + true, + max_monthly_spend, + now, + ) + .await + .unwrap(); + + // Verify the updated usage and spending + let updated_usage = db.get_usage(user_id, provider, model, now).await.unwrap(); + assert_eq!(updated_usage.spending_this_month, Cents::new(2050)); + + // Verify that there are now two billing events + let billing_events = 
db.get_billing_events().await.unwrap();
+    assert_eq!(billing_events.len(), 2);
+
+    let tokens_to_exceed = 20_000_000; // This will cost $1.00 more, pushing us from $20.50 to $21.50, which is over the $11 monthly maximum limit
+    let usage_exceeding = TokenUsage {
+        input: tokens_to_exceed,
+        input_cache_creation: 0,
+        input_cache_read: 0,
+        output: 0,
+    };
+
+    // This request pushes the user past their maximum monthly spend, so it should not create another billing event
+    db.record_usage(
+        user_id,
+        false,
+        provider,
+        model,
+        usage_exceeding,
+        true,
+        max_monthly_spend,
+        now,
+    )
+    .await
+    .unwrap();
+
+    // Verify the updated usage and spending
+    let updated_usage = db.get_usage(user_id, provider, model, now).await.unwrap();
+    assert_eq!(updated_usage.spending_this_month, Cents::new(2150));
+
+    // Verify that we never charge the user beyond their maximum monthly spend:
+    // no additional billing event is recorded.
+    let billing_events = db.get_billing_events().await.unwrap();
+    assert_eq!(billing_events.len(), 2);
+}
diff --git a/crates/collab/src/llm/db/tests/provider_tests.rs b/crates/collab/src/llm/db/tests/provider_tests.rs
index ef0da1c373fca6..0bb55ee4b69a6c 100644
--- a/crates/collab/src/llm/db/tests/provider_tests.rs
+++ b/crates/collab/src/llm/db/tests/provider_tests.rs
@@ -26,7 +26,6 @@ async fn test_initialize_providers(db: &mut LlmDatabase) {
             LanguageModelProvider::Anthropic,
             LanguageModelProvider::Google,
             LanguageModelProvider::OpenAi,
-            LanguageModelProvider::Zed
         ]
     )
 }
diff --git a/crates/collab/src/llm/db/tests/usage_tests.rs b/crates/collab/src/llm/db/tests/usage_tests.rs
index 905a3dda08101f..8e96ac4f54082c 100644
--- a/crates/collab/src/llm/db/tests/usage_tests.rs
+++ b/crates/collab/src/llm/db/tests/usage_tests.rs
@@ -2,11 +2,11 @@ use crate::{
     db::UserId,
     llm::db::{
         queries::{providers::ModelParams, usages::Usage},
-        LlmDatabase,
+        LlmDatabase, TokenUsage,
     },
-    test_llm_db,
+    test_llm_db, Cents,
 };
-use chrono::{Duration, Utc};
+use chrono::{DateTime, Duration, Utc};
 use pretty_assertions::assert_eq;
 use rpc::LanguageModelProvider;
@@ -29,18 +29,49 @@ async fn test_tracking_usage(db: &mut LlmDatabase) {
         .await
         .unwrap();

-    let t0 = Utc::now();
+    // We're using a fixed datetime to prevent flakiness based on the clock.
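+    // (Usage is aggregated into per-minute, per-day, and per-month windows relative to the
+    // `now` value passed in, so an ambient clock could cross a window boundary mid-test.)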
+ let t0 = DateTime::parse_from_rfc3339("2024-08-08T22:46:33Z") + .unwrap() + .with_timezone(&Utc); let user_id = UserId::from_proto(123); let now = t0; - db.record_usage(user_id, false, provider, model, 1000, 0, now) - .await - .unwrap(); + db.record_usage( + user_id, + false, + provider, + model, + TokenUsage { + input: 1000, + input_cache_creation: 0, + input_cache_read: 0, + output: 0, + }, + false, + Cents::ZERO, + now, + ) + .await + .unwrap(); let now = t0 + Duration::seconds(10); - db.record_usage(user_id, false, provider, model, 2000, 0, now) - .await - .unwrap(); + db.record_usage( + user_id, + false, + provider, + model, + TokenUsage { + input: 2000, + input_cache_creation: 0, + input_cache_read: 0, + output: 0, + }, + false, + Cents::ZERO, + now, + ) + .await + .unwrap(); let usage = db.get_usage(user_id, provider, model, now).await.unwrap(); assert_eq!( @@ -49,10 +80,14 @@ async fn test_tracking_usage(db: &mut LlmDatabase) { requests_this_minute: 2, tokens_this_minute: 3000, tokens_this_day: 3000, - input_tokens_this_month: 3000, - output_tokens_this_month: 0, - spending_this_month: 0, - lifetime_spending: 0, + tokens_this_month: TokenUsage { + input: 3000, + input_cache_creation: 0, + input_cache_read: 0, + output: 0, + }, + spending_this_month: Cents::ZERO, + lifetime_spending: Cents::ZERO, } ); @@ -64,17 +99,35 @@ async fn test_tracking_usage(db: &mut LlmDatabase) { requests_this_minute: 1, tokens_this_minute: 2000, tokens_this_day: 3000, - input_tokens_this_month: 3000, - output_tokens_this_month: 0, - spending_this_month: 0, - lifetime_spending: 0, + tokens_this_month: TokenUsage { + input: 3000, + input_cache_creation: 0, + input_cache_read: 0, + output: 0, + }, + spending_this_month: Cents::ZERO, + lifetime_spending: Cents::ZERO, } ); let now = t0 + Duration::seconds(60); - db.record_usage(user_id, false, provider, model, 3000, 0, now) - .await - .unwrap(); + db.record_usage( + user_id, + false, + provider, + model, + TokenUsage { + input: 3000, + input_cache_creation: 0, + input_cache_read: 0, + output: 0, + }, + false, + Cents::ZERO, + now, + ) + .await + .unwrap(); let usage = db.get_usage(user_id, provider, model, now).await.unwrap(); assert_eq!( @@ -83,10 +136,14 @@ async fn test_tracking_usage(db: &mut LlmDatabase) { requests_this_minute: 2, tokens_this_minute: 5000, tokens_this_day: 6000, - input_tokens_this_month: 6000, - output_tokens_this_month: 0, - spending_this_month: 0, - lifetime_spending: 0, + tokens_this_month: TokenUsage { + input: 6000, + input_cache_creation: 0, + input_cache_read: 0, + output: 0, + }, + spending_this_month: Cents::ZERO, + lifetime_spending: Cents::ZERO, } ); @@ -99,16 +156,34 @@ async fn test_tracking_usage(db: &mut LlmDatabase) { requests_this_minute: 0, tokens_this_minute: 0, tokens_this_day: 5000, - input_tokens_this_month: 6000, - output_tokens_this_month: 0, - spending_this_month: 0, - lifetime_spending: 0, + tokens_this_month: TokenUsage { + input: 6000, + input_cache_creation: 0, + input_cache_read: 0, + output: 0, + }, + spending_this_month: Cents::ZERO, + lifetime_spending: Cents::ZERO, } ); - db.record_usage(user_id, false, provider, model, 4000, 0, now) - .await - .unwrap(); + db.record_usage( + user_id, + false, + provider, + model, + TokenUsage { + input: 4000, + input_cache_creation: 0, + input_cache_read: 0, + output: 0, + }, + false, + Cents::ZERO, + now, + ) + .await + .unwrap(); let usage = db.get_usage(user_id, provider, model, now).await.unwrap(); assert_eq!( @@ -117,26 +192,93 @@ async fn test_tracking_usage(db: 
&mut LlmDatabase) { requests_this_minute: 1, tokens_this_minute: 4000, tokens_this_day: 9000, - input_tokens_this_month: 10000, - output_tokens_this_month: 0, - spending_this_month: 0, - lifetime_spending: 0, + tokens_this_month: TokenUsage { + input: 10000, + input_cache_creation: 0, + input_cache_read: 0, + output: 0, + }, + spending_this_month: Cents::ZERO, + lifetime_spending: Cents::ZERO, + } + ); + + // We're using a fixed datetime to prevent flakiness based on the clock. + let now = DateTime::parse_from_rfc3339("2024-10-08T22:15:58Z") + .unwrap() + .with_timezone(&Utc); + + // Test cache creation input tokens + db.record_usage( + user_id, + false, + provider, + model, + TokenUsage { + input: 1000, + input_cache_creation: 500, + input_cache_read: 0, + output: 0, + }, + false, + Cents::ZERO, + now, + ) + .await + .unwrap(); + + let usage = db.get_usage(user_id, provider, model, now).await.unwrap(); + assert_eq!( + usage, + Usage { + requests_this_minute: 1, + tokens_this_minute: 1500, + tokens_this_day: 1500, + tokens_this_month: TokenUsage { + input: 1000, + input_cache_creation: 500, + input_cache_read: 0, + output: 0, + }, + spending_this_month: Cents::ZERO, + lifetime_spending: Cents::ZERO, } ); - let t2 = t0 + Duration::days(30); - let now = t2; + // Test cache read input tokens + db.record_usage( + user_id, + false, + provider, + model, + TokenUsage { + input: 1000, + input_cache_creation: 0, + input_cache_read: 300, + output: 0, + }, + false, + Cents::ZERO, + now, + ) + .await + .unwrap(); + let usage = db.get_usage(user_id, provider, model, now).await.unwrap(); assert_eq!( usage, Usage { - requests_this_minute: 0, - tokens_this_minute: 0, - tokens_this_day: 0, - input_tokens_this_month: 9000, - output_tokens_this_month: 0, - spending_this_month: 0, - lifetime_spending: 0, + requests_this_minute: 2, + tokens_this_minute: 2800, + tokens_this_day: 2800, + tokens_this_month: TokenUsage { + input: 2000, + input_cache_creation: 500, + input_cache_read: 300, + output: 0, + }, + spending_this_month: Cents::ZERO, + lifetime_spending: Cents::ZERO, } ); } diff --git a/crates/collab/src/llm/telemetry.rs b/crates/collab/src/llm/telemetry.rs index 17a2cb9cd3389d..9daaaf3032090e 100644 --- a/crates/collab/src/llm/telemetry.rs +++ b/crates/collab/src/llm/telemetry.rs @@ -12,11 +12,15 @@ pub struct LlmUsageEventRow { pub model: String, pub provider: String, pub input_token_count: u64, + pub cache_creation_input_token_count: u64, + pub cache_read_input_token_count: u64, pub output_token_count: u64, pub requests_this_minute: u64, pub tokens_this_minute: u64, pub tokens_this_day: u64, pub input_tokens_this_month: u64, + pub cache_creation_input_tokens_this_month: u64, + pub cache_read_input_tokens_this_month: u64, pub output_tokens_this_month: u64, pub spending_this_month: u64, pub lifetime_spending: u64, diff --git a/crates/collab/src/llm/token.rs b/crates/collab/src/llm/token.rs index e1e6c7332627dc..28f52b5164832d 100644 --- a/crates/collab/src/llm/token.rs +++ b/crates/collab/src/llm/token.rs @@ -1,4 +1,8 @@ -use crate::{db::UserId, Config}; +use crate::llm::DEFAULT_MAX_MONTHLY_SPEND; +use crate::{ + db::{billing_preference, UserId}, + Config, +}; use anyhow::{anyhow, Result}; use chrono::Utc; use jsonwebtoken::{DecodingKey, EncodingKey, Header, Validation}; @@ -13,26 +17,25 @@ pub struct LlmTokenClaims { pub exp: u64, pub jti: String, pub user_id: u64, - // This field is temporarily optional so it can be added - // in a backwards-compatible way. 
We can make it required - // once all of the LLM tokens have cycled (~1 hour after - // this change has been deployed). - #[serde(default)] - pub github_user_login: Option, + pub github_user_login: String, pub is_staff: bool, - #[serde(default)] pub has_llm_closed_beta_feature_flag: bool, + pub has_llm_subscription: bool, + pub max_monthly_spend_in_cents: u32, pub plan: rpc::proto::Plan, } const LLM_TOKEN_LIFETIME: Duration = Duration::from_secs(60 * 60); impl LlmTokenClaims { + #[allow(clippy::too_many_arguments)] pub fn create( user_id: UserId, github_user_login: String, is_staff: bool, + billing_preferences: Option, has_llm_closed_beta_feature_flag: bool, + has_llm_subscription: bool, plan: rpc::proto::Plan, config: &Config, ) -> Result { @@ -47,9 +50,14 @@ impl LlmTokenClaims { exp: (now + LLM_TOKEN_LIFETIME).timestamp() as u64, jti: uuid::Uuid::new_v4().to_string(), user_id: user_id.to_proto(), - github_user_login: Some(github_user_login), + github_user_login, is_staff, has_llm_closed_beta_feature_flag, + has_llm_subscription, + max_monthly_spend_in_cents: billing_preferences + .map_or(DEFAULT_MAX_MONTHLY_SPEND.0, |preferences| { + preferences.max_monthly_llm_usage_spending_in_cents as u32 + }), plan, }; diff --git a/crates/collab/src/main.rs b/crates/collab/src/main.rs index 0e6bb67d13db37..ee95b6d41f5350 100644 --- a/crates/collab/src/main.rs +++ b/crates/collab/src/main.rs @@ -6,6 +6,7 @@ use axum::{ routing::get, Extension, Router, }; +use collab::api::billing::sync_llm_usage_with_stripe_periodically; use collab::api::CloudflareIpCountryHeader; use collab::llm::{db::LlmDatabase, log_usage_periodically}; use collab::migrations::run_database_migrations; @@ -29,7 +30,7 @@ use tower_http::trace::TraceLayer; use tracing_subscriber::{ filter::EnvFilter, fmt::format::JsonFields, util::SubscriberInitExt, Layer, }; -use util::ResultExt as _; +use util::{maybe, ResultExt as _}; const VERSION: &str = env!("CARGO_PKG_VERSION"); const REVISION: Option<&'static str> = option_env!("GITHUB_SHA"); @@ -110,6 +111,13 @@ async fn main() -> Result<()> { let state = AppState::new(config, Executor::Production).await?; + if let Some(stripe_billing) = state.stripe_billing.clone() { + let executor = state.executor.clone(); + executor.spawn_detached(async move { + stripe_billing.initialize().await.trace_err(); + }); + } + if mode.is_collab() { state.db.purge_old_embeddings().await.trace_err(); RateLimiter::save_periodically( @@ -124,6 +132,8 @@ async fn main() -> Result<()> { let rpc_server = collab::rpc::Server::new(epoch, state.clone()); rpc_server.start().await?; + poll_stripe_events_periodically(state.clone(), rpc_server.clone()); + app = app .merge(collab::api::routes(rpc_server.clone())) .merge(collab::rpc::routes(rpc_server.clone())); @@ -132,10 +142,32 @@ async fn main() -> Result<()> { } if mode.is_api() { - poll_stripe_events_periodically(state.clone()); fetch_extensions_from_blob_store_periodically(state.clone()); spawn_user_backfiller(state.clone()); + let llm_db = maybe!(async { + let database_url = state + .config + .llm_database_url + .as_ref() + .ok_or_else(|| anyhow!("missing LLM_DATABASE_URL"))?; + let max_connections = state + .config + .llm_database_max_connections + .ok_or_else(|| anyhow!("missing LLM_DATABASE_MAX_CONNECTIONS"))?; + + let mut db_options = db::ConnectOptions::new(database_url); + db_options.max_connections(max_connections); + LlmDatabase::new(db_options, state.executor.clone()).await + }) + .await + .trace_err(); + + if let Some(mut llm_db) = llm_db { + 
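+            // Only reached when LLM_DATABASE_URL and LLM_DATABASE_MAX_CONNECTIONS are set:
+            // initialize the LLM database, then start the periodic Stripe usage sync.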
llm_db.initialize().await?; + sync_llm_usage_with_stripe_periodically(state.clone()); + } + app = app .merge(collab::api::events::router()) .merge(collab::api::extensions::router()) diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 7db446b2b8a725..05d7726069959f 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -35,7 +35,9 @@ use chrono::Utc; use collections::{HashMap, HashSet}; pub use connection_pool::{ConnectionPool, ZedVersion}; use core::fmt::{self, Debug, Formatter}; +use http_client::HttpClient; use open_ai::{OpenAiEmbeddingModel, OPEN_AI_API_URL}; +use reqwest_client::ReqwestClient; use sha2::Digest; use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi}; @@ -45,7 +47,6 @@ use futures::{ stream::FuturesUnordered, FutureExt, SinkExt, StreamExt, TryStreamExt, }; -use http_client::IsahcHttpClient; use prometheus::{register_int_gauge, IntGauge}; use rpc::{ proto::{ @@ -139,7 +140,7 @@ struct Session { connection_pool: Arc>, app_state: Arc, supermaven_client: Option>, - http_client: Arc, + http_client: Arc, /// The GeoIP country code for the user. #[allow(unused)] geoip_country_code: Option, @@ -190,16 +191,26 @@ impl Session { } } - pub async fn current_plan(&self, db: MutexGuard<'_, DbHandle>) -> anyhow::Result { + pub async fn has_llm_subscription( + &self, + db: &MutexGuard<'_, DbHandle>, + ) -> anyhow::Result { if self.is_staff() { - return Ok(proto::Plan::ZedPro); + return Ok(true); } let Some(user_id) = self.user_id() else { - return Ok(proto::Plan::Free); + return Ok(false); }; - if db.has_active_billing_subscription(user_id).await? { + Ok(db.has_active_billing_subscription(user_id).await?) + } + + pub async fn current_plan( + &self, + _db: &MutexGuard<'_, DbHandle>, + ) -> anyhow::Result { + if self.is_staff() { Ok(proto::Plan::ZedPro) } else { Ok(proto::Plan::Free) @@ -458,9 +469,6 @@ impl Server { .add_request_handler(user_handler( forward_project_request_for_owner::, )) - .add_request_handler(user_handler( - forward_project_request_for_owner::, - )) .add_request_handler(user_handler( forward_read_only_project_request::, )) @@ -473,9 +481,6 @@ impl Server { .add_request_handler(user_handler( forward_read_only_project_request::, )) - .add_request_handler(user_handler( - forward_read_only_project_request::, - )) .add_request_handler(user_handler(forward_find_search_candidates_request)) .add_request_handler(user_handler( forward_read_only_project_request::, @@ -956,7 +961,7 @@ impl Server { tracing::info!("connection opened"); let user_agent = format!("Zed Server/{}", env!("CARGO_PKG_VERSION")); - let http_client = match IsahcHttpClient::builder().default_header("User-Agent", user_agent).build() { + let http_client = match ReqwestClient::user_agent(&user_agent) { Ok(http_client) => Arc::new(http_client), Err(error) => { tracing::error!(?error, "failed to create HTTP client"); @@ -1213,6 +1218,15 @@ impl Server { Ok(()) } + pub async fn refresh_llm_tokens_for_user(self: &Arc, user_id: UserId) { + let pool = self.connection_pool.lock(); + for connection_id in pool.user_connection_ids(user_id) { + self.peer + .send(connection_id, proto::RefreshLlmToken {}) + .trace_err(); + } + } + pub async fn snapshot<'a>(self: &'a Arc) -> ServerSnapshot<'a> { ServerSnapshot { connection_pool: ConnectionPoolGuard { @@ -1741,6 +1755,7 @@ fn notify_rejoined_projects( worktree_id: worktree.id, path: settings_file.path, content: Some(settings_file.content), + kind: Some(settings_file.kind.to_proto().into()), }, )?; } @@ -1996,6 +2011,7 @@ async 
fn share_project( RoomId::from_proto(request.room_id), session.connection_id, &request.worktrees, + request.is_ssh_project, request .dev_server_project_id .map(DevServerProjectId::from_proto), @@ -2221,6 +2237,7 @@ fn join_project_internal( worktree_id: worktree.id, path: settings_file.path, content: Some(settings_file.content), + kind: Some(proto::update_user_settings::Kind::Settings.into()), }, )?; } @@ -2296,7 +2313,7 @@ async fn list_remote_directory( let dev_server_connection_id = session .connection_pool() .await - .dev_server_connection_id_supporting(dev_server_id, ZedVersion::with_list_directory())?; + .online_dev_server_connection_id(dev_server_id)?; session .db() @@ -2335,10 +2352,7 @@ async fn update_dev_server_project( let dev_server_connection_id = session .connection_pool() .await - .dev_server_connection_id_supporting( - dev_server_project.dev_server_id, - ZedVersion::with_list_directory(), - )?; + .online_dev_server_connection_id(dev_server_project.dev_server_id)?; session.peer.send( dev_server_connection_id, @@ -2948,40 +2962,6 @@ async fn forward_find_search_candidates_request( .await .host_for_read_only_project_request(project_id, session.connection_id, session.user_id()) .await?; - - let host_version = session - .connection_pool() - .await - .connection(host_connection_id) - .map(|c| c.zed_version); - - if host_version.is_some_and(|host_version| host_version < ZedVersion::with_search_candidates()) - { - let query = request.query.ok_or_else(|| anyhow!("missing query"))?; - let search = proto::SearchProject { - project_id: project_id.to_proto(), - query: query.query, - regex: query.regex, - whole_word: query.whole_word, - case_sensitive: query.case_sensitive, - files_to_include: query.files_to_include, - files_to_exclude: query.files_to_exclude, - include_ignored: query.include_ignored, - }; - - let payload = session - .peer - .forward_request(session.connection_id, host_connection_id, search) - .await?; - return response.send(proto::FindSearchCandidatesResponse { - buffer_ids: payload - .locations - .into_iter() - .map(|loc| loc.buffer_id) - .collect(), - }); - } - let payload = session .peer .forward_request(session.connection_id, host_connection_id, request) @@ -3507,7 +3487,7 @@ fn should_auto_subscribe_to_channels(version: ZedVersion) -> bool { } async fn update_user_plan(_user_id: UserId, session: &Session) -> Result<()> { - let plan = session.current_plan(session.db().await).await?; + let plan = session.current_plan(&session.db().await).await?; session .peer @@ -4507,7 +4487,7 @@ async fn count_language_model_tokens( }; authorize_access_to_legacy_llm_endpoints(&session).await?; - let rate_limit: Box = match session.current_plan(session.db().await).await? { + let rate_limit: Box = match session.current_plan(&session.db().await).await? { proto::Plan::ZedPro => Box::new(ZedProCountLanguageModelTokensRateLimit), proto::Plan::Free => Box::new(FreeCountLanguageModelTokensRateLimit), }; @@ -4628,7 +4608,7 @@ async fn compute_embeddings( let api_key = api_key.context("no OpenAI API key configured on the server")?; authorize_access_to_legacy_llm_endpoints(&session).await?; - let rate_limit: Box = match session.current_plan(session.db().await).await? { + let rate_limit: Box = match session.current_plan(&session.db().await).await? 
{ proto::Plan::ZedPro => Box::new(ZedProComputeEmbeddingsRateLimit), proto::Plan::Free => Box::new(FreeComputeEmbeddingsRateLimit), }; @@ -4946,12 +4926,17 @@ async fn get_llm_api_token( if Utc::now().naive_utc() - account_created_at < MIN_ACCOUNT_AGE_FOR_LLM_USE { Err(anyhow!("account too young"))? } + + let billing_preferences = db.get_billing_preferences(user.id).await?; + let token = LlmTokenClaims::create( user.id, user.github_login.clone(), session.is_staff(), + billing_preferences, has_llm_closed_beta_feature_flag, - session.current_plan(db).await?, + session.has_llm_subscription(&db).await?, + session.current_plan(&db).await?, &session.app_state.config, )?; response.send(proto::GetLlmTokenResponse { token })?; diff --git a/crates/collab/src/rpc/connection_pool.rs b/crates/collab/src/rpc/connection_pool.rs index ad0131aaa18e5f..96deefba7949c7 100644 --- a/crates/collab/src/rpc/connection_pool.rs +++ b/crates/collab/src/rpc/connection_pool.rs @@ -32,15 +32,7 @@ impl fmt::Display for ZedVersion { impl ZedVersion { pub fn can_collaborate(&self) -> bool { - self.0 >= SemanticVersion::new(0, 134, 0) - } - - pub fn with_list_directory() -> ZedVersion { - ZedVersion(SemanticVersion::new(0, 145, 0)) - } - - pub fn with_search_candidates() -> ZedVersion { - ZedVersion(SemanticVersion::new(0, 151, 0)) + self.0 >= SemanticVersion::new(0, 151, 0) } } @@ -169,6 +161,16 @@ impl ConnectionPool { self.connected_dev_servers.get(&dev_server_id).copied() } + pub fn online_dev_server_connection_id( + &self, + dev_server_id: DevServerId, + ) -> Result { + match self.connected_dev_servers.get(&dev_server_id) { + Some(cid) => Ok(*cid), + None => Err(anyhow!(proto::ErrorCode::DevServerOffline)), + } + } + pub fn dev_server_connection_id_supporting( &self, dev_server_id: DevServerId, diff --git a/crates/collab/src/seed.rs b/crates/collab/src/seed.rs index 15aa9d159183f8..5de6515ae3ac89 100644 --- a/crates/collab/src/seed.rs +++ b/crates/collab/src/seed.rs @@ -4,10 +4,13 @@ use anyhow::Context; use chrono::{DateTime, Utc}; use db::Database; use serde::{de::DeserializeOwned, Deserialize}; -use std::{fmt::Write, fs, path::Path}; +use std::{fs, path::Path}; use crate::Config; +/// A GitHub user. +/// +/// This representation corresponds to the entries in the `seed/github_users.json` file. #[derive(Debug, Deserialize)] struct GithubUser { id: i32, @@ -18,12 +21,10 @@ struct GithubUser { #[derive(Deserialize)] struct SeedConfig { - // Which users to create as admins. + /// Which users to create as admins. admins: Vec, - // Which channels to create (all admins are invited to all channels) + /// Which channels to create (all admins are invited to all channels). 
channels: Vec, - // Number of random users to create from the Github API - number_of_users: Option, } pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result<()> { @@ -47,11 +48,21 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result let flag_names = ["remoting", "language-models"]; let mut flags = Vec::new(); + let existing_feature_flags = db.list_feature_flags().await?; + for flag_name in flag_names { + if existing_feature_flags + .iter() + .any(|flag| flag.flag == flag_name) + { + log::info!("Flag {flag_name:?} already exists"); + continue; + } + let flag = db .create_user_flag(flag_name, false) .await - .unwrap_or_else(|_| panic!("failed to create flag: '{flag_name}'")); + .unwrap_or_else(|err| panic!("failed to create flag: '{flag_name}': {err}")); flags.push(flag); } @@ -106,44 +117,29 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result } } - // TODO: Fix this later - if let Some(number_of_users) = seed_config.number_of_users { - // Fetch 100 other random users from GitHub and insert them into the database - // (for testing autocompleters, etc.) - let mut user_count = db - .get_all_users(0, 200) + let github_users_filepath = seed_path.parent().unwrap().join("seed/github_users.json"); + let github_users: Vec = + serde_json::from_str(&fs::read_to_string(github_users_filepath)?)?; + + for github_user in github_users { + log::info!("Seeding {:?} from GitHub", github_user.login); + + let user = db + .get_or_create_user_by_github_account( + &github_user.login, + github_user.id, + github_user.email.as_deref(), + github_user.created_at, + None, + ) .await - .expect("failed to load users from db") - .len(); - let mut last_user_id = None; - while user_count < number_of_users { - let mut uri = "https://api.github.com/users?per_page=100".to_string(); - if let Some(last_user_id) = last_user_id { - write!(&mut uri, "&since={}", last_user_id).unwrap(); - } - let users = fetch_github::>(&client, &uri).await; - - for github_user in users { - last_user_id = Some(github_user.id); - user_count += 1; - let user = db - .get_or_create_user_by_github_account( - &github_user.login, - github_user.id, - github_user.email.as_deref(), - github_user.created_at, - None, - ) - .await - .expect("failed to insert user"); - - for flag in &flags { - db.add_user_flag(user.id, *flag).await.context(format!( - "Unable to enable flag '{}' for user '{}'", - flag, user.id - ))?; - } - } + .expect("failed to insert user"); + + for flag in &flags { + db.add_user_flag(user.id, *flag).await.context(format!( + "Unable to enable flag '{}' for user '{}'", + flag, user.id + ))?; } } diff --git a/crates/collab/src/stripe_billing.rs b/crates/collab/src/stripe_billing.rs new file mode 100644 index 00000000000000..126db988a1ea8a --- /dev/null +++ b/crates/collab/src/stripe_billing.rs @@ -0,0 +1,479 @@ +use std::sync::Arc; + +use crate::{llm, Cents, Result}; +use anyhow::Context; +use chrono::{Datelike, Utc}; +use collections::HashMap; +use serde::{Deserialize, Serialize}; +use tokio::sync::RwLock; + +pub struct StripeBilling { + state: RwLock, + client: Arc, +} + +#[derive(Default)] +struct StripeBillingState { + meters_by_event_name: HashMap, + price_ids_by_meter_id: HashMap, +} + +pub struct StripeModel { + input_tokens_price: StripeBillingPrice, + input_cache_creation_tokens_price: StripeBillingPrice, + input_cache_read_tokens_price: StripeBillingPrice, + output_tokens_price: StripeBillingPrice, +} + +struct StripeBillingPrice { + id: 
stripe::PriceId, + meter_event_name: String, +} + +impl StripeBilling { + pub fn new(client: Arc) -> Self { + Self { + client, + state: RwLock::default(), + } + } + + pub async fn initialize(&self) -> Result<()> { + log::info!("StripeBilling: initializing"); + + let mut state = self.state.write().await; + + let (meters, prices) = futures::try_join!( + StripeMeter::list(&self.client), + stripe::Price::list( + &self.client, + &stripe::ListPrices { + limit: Some(100), + ..Default::default() + } + ) + )?; + + for meter in meters.data { + state + .meters_by_event_name + .insert(meter.event_name.clone(), meter); + } + + for price in prices.data { + if let Some(recurring) = price.recurring { + if let Some(meter) = recurring.meter { + state.price_ids_by_meter_id.insert(meter, price.id); + } + } + } + + log::info!("StripeBilling: initialized"); + + Ok(()) + } + + pub async fn register_model(&self, model: &llm::db::model::Model) -> Result { + let input_tokens_price = self + .get_or_insert_price( + &format!("model_{}/input_tokens", model.id), + &format!("{} (Input Tokens)", model.name), + Cents::new(model.price_per_million_input_tokens as u32), + ) + .await?; + let input_cache_creation_tokens_price = self + .get_or_insert_price( + &format!("model_{}/input_cache_creation_tokens", model.id), + &format!("{} (Input Cache Creation Tokens)", model.name), + Cents::new(model.price_per_million_cache_creation_input_tokens as u32), + ) + .await?; + let input_cache_read_tokens_price = self + .get_or_insert_price( + &format!("model_{}/input_cache_read_tokens", model.id), + &format!("{} (Input Cache Read Tokens)", model.name), + Cents::new(model.price_per_million_cache_read_input_tokens as u32), + ) + .await?; + let output_tokens_price = self + .get_or_insert_price( + &format!("model_{}/output_tokens", model.id), + &format!("{} (Output Tokens)", model.name), + Cents::new(model.price_per_million_output_tokens as u32), + ) + .await?; + Ok(StripeModel { + input_tokens_price, + input_cache_creation_tokens_price, + input_cache_read_tokens_price, + output_tokens_price, + }) + } + + async fn get_or_insert_price( + &self, + meter_event_name: &str, + price_description: &str, + price_per_million_tokens: Cents, + ) -> Result { + // Fast code path when the meter and the price already exist. 
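+        // (Double-checked locking: the read lock covers the common case; we only fall
+        // through to the write lock below when the meter or the price is not cached yet.)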
+ { + let state = self.state.read().await; + if let Some(meter) = state.meters_by_event_name.get(meter_event_name) { + if let Some(price_id) = state.price_ids_by_meter_id.get(&meter.id) { + return Ok(StripeBillingPrice { + id: price_id.clone(), + meter_event_name: meter_event_name.to_string(), + }); + } + } + } + + let mut state = self.state.write().await; + let meter = if let Some(meter) = state.meters_by_event_name.get(meter_event_name) { + meter.clone() + } else { + let meter = StripeMeter::create( + &self.client, + StripeCreateMeterParams { + default_aggregation: DefaultAggregation { formula: "sum" }, + display_name: price_description.to_string(), + event_name: meter_event_name, + }, + ) + .await?; + state + .meters_by_event_name + .insert(meter_event_name.to_string(), meter.clone()); + meter + }; + + let price_id = if let Some(price_id) = state.price_ids_by_meter_id.get(&meter.id) { + price_id.clone() + } else { + let price = stripe::Price::create( + &self.client, + stripe::CreatePrice { + active: Some(true), + billing_scheme: Some(stripe::PriceBillingScheme::PerUnit), + currency: stripe::Currency::USD, + currency_options: None, + custom_unit_amount: None, + expand: &[], + lookup_key: None, + metadata: None, + nickname: None, + product: None, + product_data: Some(stripe::CreatePriceProductData { + id: None, + active: Some(true), + metadata: None, + name: price_description.to_string(), + statement_descriptor: None, + tax_code: None, + unit_label: None, + }), + recurring: Some(stripe::CreatePriceRecurring { + aggregate_usage: None, + interval: stripe::CreatePriceRecurringInterval::Month, + interval_count: None, + trial_period_days: None, + usage_type: Some(stripe::CreatePriceRecurringUsageType::Metered), + meter: Some(meter.id.clone()), + }), + tax_behavior: None, + tiers: None, + tiers_mode: None, + transfer_lookup_key: None, + transform_quantity: None, + unit_amount: None, + unit_amount_decimal: Some(&format!( + "{:.12}", + price_per_million_tokens.0 as f64 / 1_000_000f64 + )), + }, + ) + .await?; + state + .price_ids_by_meter_id + .insert(meter.id, price.id.clone()); + price.id + }; + + Ok(StripeBillingPrice { + id: price_id, + meter_event_name: meter_event_name.to_string(), + }) + } + + pub async fn subscribe_to_model( + &self, + subscription_id: &stripe::SubscriptionId, + model: &StripeModel, + ) -> Result<()> { + let subscription = + stripe::Subscription::retrieve(&self.client, &subscription_id, &[]).await?; + + let mut items = Vec::new(); + + if !subscription_contains_price(&subscription, &model.input_tokens_price.id) { + items.push(stripe::UpdateSubscriptionItems { + price: Some(model.input_tokens_price.id.to_string()), + ..Default::default() + }); + } + + if !subscription_contains_price(&subscription, &model.input_cache_creation_tokens_price.id) + { + items.push(stripe::UpdateSubscriptionItems { + price: Some(model.input_cache_creation_tokens_price.id.to_string()), + ..Default::default() + }); + } + + if !subscription_contains_price(&subscription, &model.input_cache_read_tokens_price.id) { + items.push(stripe::UpdateSubscriptionItems { + price: Some(model.input_cache_read_tokens_price.id.to_string()), + ..Default::default() + }); + } + + if !subscription_contains_price(&subscription, &model.output_tokens_price.id) { + items.push(stripe::UpdateSubscriptionItems { + price: Some(model.output_tokens_price.id.to_string()), + ..Default::default() + }); + } + + if !items.is_empty() { + items.extend(subscription.items.data.iter().map(|item| { + stripe::UpdateSubscriptionItems { + id: 
Some(item.id.to_string()), + ..Default::default() + } + })); + + stripe::Subscription::update( + &self.client, + subscription_id, + stripe::UpdateSubscription { + items: Some(items), + ..Default::default() + }, + ) + .await?; + } + + Ok(()) + } + + pub async fn bill_model_usage( + &self, + customer_id: &stripe::CustomerId, + model: &StripeModel, + event: &llm::db::billing_event::Model, + ) -> Result<()> { + let timestamp = Utc::now().timestamp(); + + if event.input_tokens > 0 { + StripeMeterEvent::create( + &self.client, + StripeCreateMeterEventParams { + identifier: &format!("input_tokens/{}", event.idempotency_key), + event_name: &model.input_tokens_price.meter_event_name, + payload: StripeCreateMeterEventPayload { + value: event.input_tokens as u64, + stripe_customer_id: customer_id, + }, + timestamp: Some(timestamp), + }, + ) + .await?; + } + + if event.input_cache_creation_tokens > 0 { + StripeMeterEvent::create( + &self.client, + StripeCreateMeterEventParams { + identifier: &format!("input_cache_creation_tokens/{}", event.idempotency_key), + event_name: &model.input_cache_creation_tokens_price.meter_event_name, + payload: StripeCreateMeterEventPayload { + value: event.input_cache_creation_tokens as u64, + stripe_customer_id: customer_id, + }, + timestamp: Some(timestamp), + }, + ) + .await?; + } + + if event.input_cache_read_tokens > 0 { + StripeMeterEvent::create( + &self.client, + StripeCreateMeterEventParams { + identifier: &format!("input_cache_read_tokens/{}", event.idempotency_key), + event_name: &model.input_cache_read_tokens_price.meter_event_name, + payload: StripeCreateMeterEventPayload { + value: event.input_cache_read_tokens as u64, + stripe_customer_id: customer_id, + }, + timestamp: Some(timestamp), + }, + ) + .await?; + } + + if event.output_tokens > 0 { + StripeMeterEvent::create( + &self.client, + StripeCreateMeterEventParams { + identifier: &format!("output_tokens/{}", event.idempotency_key), + event_name: &model.output_tokens_price.meter_event_name, + payload: StripeCreateMeterEventPayload { + value: event.output_tokens as u64, + stripe_customer_id: customer_id, + }, + timestamp: Some(timestamp), + }, + ) + .await?; + } + + Ok(()) + } + + pub async fn checkout( + &self, + customer_id: stripe::CustomerId, + github_login: &str, + model: &StripeModel, + success_url: &str, + ) -> Result { + let first_of_next_month = Utc::now() + .checked_add_months(chrono::Months::new(1)) + .unwrap() + .with_day(1) + .unwrap(); + + let mut params = stripe::CreateCheckoutSession::new(); + params.mode = Some(stripe::CheckoutSessionMode::Subscription); + params.customer = Some(customer_id); + params.client_reference_id = Some(github_login); + params.subscription_data = Some(stripe::CreateCheckoutSessionSubscriptionData { + billing_cycle_anchor: Some(first_of_next_month.timestamp()), + ..Default::default() + }); + params.line_items = Some( + [ + &model.input_tokens_price.id, + &model.input_cache_creation_tokens_price.id, + &model.input_cache_read_tokens_price.id, + &model.output_tokens_price.id, + ] + .into_iter() + .map(|price_id| stripe::CreateCheckoutSessionLineItems { + price: Some(price_id.to_string()), + ..Default::default() + }) + .collect(), + ); + params.success_url = Some(success_url); + + let session = stripe::CheckoutSession::create(&self.client, params).await?; + Ok(session.url.context("no checkout session URL")?) 
+ } +} + +#[derive(Serialize)] +struct DefaultAggregation { + formula: &'static str, +} + +#[derive(Serialize)] +struct StripeCreateMeterParams<'a> { + default_aggregation: DefaultAggregation, + display_name: String, + event_name: &'a str, +} + +#[derive(Clone, Deserialize)] +struct StripeMeter { + id: String, + event_name: String, +} + +impl StripeMeter { + pub fn create( + client: &stripe::Client, + params: StripeCreateMeterParams, + ) -> stripe::Response { + client.post_form("/billing/meters", params) + } + + pub fn list(client: &stripe::Client) -> stripe::Response> { + #[derive(Serialize)] + struct Params { + #[serde(skip_serializing_if = "Option::is_none")] + limit: Option, + } + + client.get_query("/billing/meters", Params { limit: Some(100) }) + } +} + +#[derive(Deserialize)] +struct StripeMeterEvent { + identifier: String, +} + +impl StripeMeterEvent { + pub async fn create( + client: &stripe::Client, + params: StripeCreateMeterEventParams<'_>, + ) -> Result { + let identifier = params.identifier; + match client.post_form("/billing/meter_events", params).await { + Ok(event) => Ok(event), + Err(stripe::StripeError::Stripe(error)) => { + if error.http_status == 400 + && error + .message + .as_ref() + .map_or(false, |message| message.contains(identifier)) + { + Ok(Self { + identifier: identifier.to_string(), + }) + } else { + Err(stripe::StripeError::Stripe(error)) + } + } + Err(error) => Err(error), + } + } +} + +#[derive(Serialize)] +struct StripeCreateMeterEventParams<'a> { + identifier: &'a str, + event_name: &'a str, + payload: StripeCreateMeterEventPayload<'a>, + timestamp: Option, +} + +#[derive(Serialize)] +struct StripeCreateMeterEventPayload<'a> { + value: u64, + stripe_customer_id: &'a stripe::CustomerId, +} + +fn subscription_contains_price( + subscription: &stripe::Subscription, + price_id: &stripe::PriceId, +) -> bool { + subscription.items.data.iter().any(|item| { + item.price + .as_ref() + .map_or(false, |price| price.id == *price_id) + }) +} diff --git a/crates/collab/src/tests.rs b/crates/collab/src/tests.rs index c452705f9be5bf..f6e0bc3036b1c5 100644 --- a/crates/collab/src/tests.rs +++ b/crates/collab/src/tests.rs @@ -63,6 +63,6 @@ fn rust_lang() -> Arc { }, ..Default::default() }, - Some(tree_sitter_rust::language()), + Some(tree_sitter_rust::LANGUAGE.into()), )) } diff --git a/crates/collab/src/tests/channel_buffer_tests.rs b/crates/collab/src/tests/channel_buffer_tests.rs index 1ba41c45bb6068..b5bfd0f03b9ec7 100644 --- a/crates/collab/src/tests/channel_buffer_tests.rs +++ b/crates/collab/src/tests/channel_buffer_tests.rs @@ -246,7 +246,7 @@ async fn test_channel_notes_participant_indices( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); // Clients A and B open the same file. 
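The meter-event creation above is retry-safe: each Stripe meter event carries an identifier derived from the billing event's `idempotency_key`, and a 400 response whose error message echoes that identifier is treated as "already recorded" rather than as a failure, so replaying `bill_model_usage` does not double-bill. A minimal, self-contained sketch of that pattern (the `SendError` type, the `send` closure, and `create_event_idempotently` are hypothetical stand-ins for the Stripe client, not part of this change):

    // Treat an "event with this identifier already exists" rejection as success,
    // so retries of the same billing event are harmless.
    #[derive(Debug)]
    enum SendError {
        Duplicate(String), // the API echoed our identifier back in a 400
        Other(String),
    }

    fn create_event_idempotently(
        identifier: &str,
        send: impl Fn(&str) -> Result<(), SendError>,
    ) -> Result<(), SendError> {
        match send(identifier) {
            Ok(()) => Ok(()),
            // Already recorded under this identifier: report success.
            Err(SendError::Duplicate(message)) if message.contains(identifier) => Ok(()),
            Err(other) => Err(other),
        }
    }

    fn main() {
        // First attempt succeeds.
        assert!(create_event_idempotently("input_tokens/key-1", |_| Ok(())).is_ok());
        // A retry is rejected as a duplicate but is still reported as Ok.
        assert!(create_event_idempotently("input_tokens/key-1", |id| {
            Err(SendError::Duplicate(format!("event already exists: {id}")))
        })
        .is_ok());
    }

Because the identifier embeds `idempotency_key`, re-processing the same `billing_events` row maps to the same meter event on Stripe's side.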
diff --git a/crates/collab/src/tests/channel_guest_tests.rs b/crates/collab/src/tests/channel_guest_tests.rs index 8df318dc290936..06b14bee5e6df6 100644 --- a/crates/collab/src/tests/channel_guest_tests.rs +++ b/crates/collab/src/tests/channel_guest_tests.rs @@ -50,7 +50,7 @@ async fn test_channel_guests( project_b.read_with(cx_b, |project, _| project.remote_id()), Some(project_id), ); - assert!(project_b.read_with(cx_b, |project, _| project.is_read_only())); + assert!(project_b.read_with(cx_b, |project, cx| project.is_read_only(cx))); assert!(project_b .update(cx_b, |project, cx| { let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id(); @@ -103,7 +103,7 @@ async fn test_channel_guest_promotion(cx_a: &mut TestAppContext, cx_b: &mut Test workspace.active_item_as::(cx).unwrap(), ) }); - assert!(project_b.read_with(cx_b, |project, _| project.is_read_only())); + assert!(project_b.read_with(cx_b, |project, cx| project.is_read_only(cx))); assert!(editor_b.update(cx_b, |e, cx| e.read_only(cx))); assert!(room_b.read_with(cx_b, |room, _| !room.can_use_microphone())); assert!(room_b @@ -127,7 +127,7 @@ async fn test_channel_guest_promotion(cx_a: &mut TestAppContext, cx_b: &mut Test cx_a.run_until_parked(); // project and buffers are now editable - assert!(project_b.read_with(cx_b, |project, _| !project.is_read_only())); + assert!(project_b.read_with(cx_b, |project, cx| !project.is_read_only(cx))); assert!(editor_b.update(cx_b, |editor, cx| !editor.read_only(cx))); // B sees themselves as muted, and can unmute. @@ -153,7 +153,7 @@ async fn test_channel_guest_promotion(cx_a: &mut TestAppContext, cx_b: &mut Test cx_a.run_until_parked(); // project and buffers are no longer editable - assert!(project_b.read_with(cx_b, |project, _| project.is_read_only())); + assert!(project_b.read_with(cx_b, |project, cx| project.is_read_only(cx))); assert!(editor_b.update(cx_b, |editor, cx| editor.read_only(cx))); assert!(room_b .update(cx_b, |room, cx| room.share_microphone(cx)) diff --git a/crates/collab/src/tests/dev_server_tests.rs b/crates/collab/src/tests/dev_server_tests.rs index 5acdeb706eb144..cbeb2a85a0ae0e 100644 --- a/crates/collab/src/tests/dev_server_tests.rs +++ b/crates/collab/src/tests/dev_server_tests.rs @@ -262,7 +262,7 @@ async fn test_dev_server_leave_room( cx1.executor().run_until_parked(); let (workspace, cx2) = client2.active_workspace(cx2); - cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected())); + cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx))); } #[gpui::test] @@ -308,7 +308,7 @@ async fn test_dev_server_delete( cx1.executor().run_until_parked(); let (workspace, cx2) = client2.active_workspace(cx2); - cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected())); + cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx))); cx1.update(|cx| { dev_server_projects::Store::global(cx).update(cx, |store, _| { @@ -418,12 +418,12 @@ async fn test_dev_server_refresh_access_token( // Assert that the other client was disconnected let (workspace, cx2) = client2.active_workspace(cx2); - cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected())); + cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx))); // Assert that the owner of the dev server does not see the dev server as online anymore let (workspace, cx1) = client1.active_workspace(cx1); cx1.update(|cx| { - assert!(workspace.read(cx).project().read(cx).is_disconnected()); + 
assert!(workspace.read(cx).project().read(cx).is_disconnected(cx)); dev_server_projects::Store::global(cx).update(cx, |store, _| { assert_eq!( store.dev_servers().first().unwrap().status, diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index 3f205b7f937c96..16deef70d58aa8 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -7,18 +7,12 @@ use collections::HashMap; use editor::{ actions::{ ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst, Redo, Rename, - RevertSelectedHunks, ToggleCodeActions, Undo, - }, - display_map::DisplayRow, - test::{ - editor_hunks, - editor_test_context::{AssertionContextManager, EditorTestContext}, - expanded_hunks, expanded_hunks_background_highlights, + ToggleCodeActions, Undo, }, + test::editor_test_context::{AssertionContextManager, EditorTestContext}, Editor, }; use futures::StreamExt; -use git::diff::DiffHunkStatus; use gpui::{TestAppContext, UpdateGlobal, VisualContext, VisualTestContext}; use indoc::indoc; use language::{ @@ -82,7 +76,7 @@ async fn test_host_disconnect( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; cx_a.background_executor.run_until_parked(); assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer())); @@ -120,7 +114,7 @@ async fn test_host_disconnect( project_a.read_with(cx_a, |project, _| assert!(!project.is_shared())); - project_b.read_with(cx_b, |project, _| project.is_read_only()); + project_b.read_with(cx_b, |project, cx| project.is_read_only(cx)); assert!(worktree_a.read_with(cx_a, |tree, _| !tree.has_update_observer())); @@ -198,7 +192,7 @@ async fn test_newline_above_or_below_does_not_move_guest_cursor( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a buffer as client A let buffer_a = project_a @@ -284,7 +278,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu let active_call_a = cx_a.read(ActiveCall::global); client_a.language_registry().add(rust_lang()); - let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + let mut fake_language_servers = client_a.language_registry().register_fake_lsp( "Rust", FakeLspAdapter { capabilities: lsp::ServerCapabilities { @@ -314,7 +308,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a file in an editor as the guest. let buffer_b = project_b @@ -552,7 +546,7 @@ async fn test_collaborating_with_code_actions( client_a.language_registry().add(rust_lang()); let mut fake_language_servers = client_a .language_registry() - .register_fake_lsp_adapter("Rust", FakeLspAdapter::default()); + .register_fake_lsp("Rust", FakeLspAdapter::default()); client_a .fs() @@ -571,7 +565,7 @@ async fn test_collaborating_with_code_actions( .unwrap(); // Join the project as client B. 
- let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); let editor_b = workspace_b .update(cx_b, |workspace, cx| { @@ -757,7 +751,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T // Set up a fake language server. client_a.language_registry().add(rust_lang()); - let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + let mut fake_language_servers = client_a.language_registry().register_fake_lsp( "Rust", FakeLspAdapter { capabilities: lsp::ServerCapabilities { @@ -786,7 +780,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); let editor_b = workspace_b @@ -982,7 +976,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes cx_b.update(editor::init); client_a.language_registry().add(rust_lang()); - let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + let mut fake_language_servers = client_a.language_registry().register_fake_lsp( "Rust", FakeLspAdapter { name: "the-language-server", @@ -1036,7 +1030,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes .await .unwrap(); executor.run_until_parked(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; project_b.read_with(cx_b, |project, cx| { let status = project.language_server_statuses(cx).next().unwrap().1; @@ -1132,9 +1126,7 @@ async fn test_share_project( .await .unwrap(); let client_b_peer_id = client_b.peer_id().unwrap(); - let project_b = client_b - .build_dev_server_project(initial_project.id, cx_b) - .await; + let project_b = client_b.join_remote_project(initial_project.id, cx_b).await; let replica_id_b = project_b.read_with(cx_b, |project, _| project.replica_id()); @@ -1236,9 +1228,7 @@ async fn test_share_project( .update(cx_c, |call, cx| call.accept_incoming(cx)) .await .unwrap(); - let _project_c = client_c - .build_dev_server_project(initial_project.id, cx_c) - .await; + let _project_c = client_c.join_remote_project(initial_project.id, cx_c).await; // Client B closes the editor, and client A sees client B's selections removed. cx_b.update(move |_| drop(editor_b)); @@ -1268,7 +1258,7 @@ async fn test_on_input_format_from_host_to_guest( let active_call_a = cx_a.read(ActiveCall::global); client_a.language_registry().add(rust_lang()); - let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + let mut fake_language_servers = client_a.language_registry().register_fake_lsp( "Rust", FakeLspAdapter { capabilities: lsp::ServerCapabilities { @@ -1297,7 +1287,7 @@ async fn test_on_input_format_from_host_to_guest( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a file in an editor as the host. 
let buffer_a = project_a @@ -1388,7 +1378,7 @@ async fn test_on_input_format_from_guest_to_host( let active_call_a = cx_a.read(ActiveCall::global); client_a.language_registry().add(rust_lang()); - let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + let mut fake_language_servers = client_a.language_registry().register_fake_lsp( "Rust", FakeLspAdapter { capabilities: lsp::ServerCapabilities { @@ -1417,7 +1407,7 @@ async fn test_on_input_format_from_guest_to_host( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a file in an editor as the guest. let buffer_b = project_b @@ -1524,6 +1514,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( show_type_hints: true, show_parameter_hints: false, show_other_hints: true, + show_background: false, }) }); }); @@ -1538,6 +1529,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( show_type_hints: true, show_parameter_hints: false, show_other_hints: true, + show_background: false, }) }); }); @@ -1545,7 +1537,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( client_a.language_registry().add(rust_lang()); client_b.language_registry().add(rust_lang()); - let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + let mut fake_language_servers = client_a.language_registry().register_fake_lsp( "Rust", FakeLspAdapter { capabilities: lsp::ServerCapabilities { @@ -1578,7 +1570,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( .unwrap(); // Client B joins the project - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -1786,6 +1778,7 @@ async fn test_inlay_hint_refresh_is_forwarded( show_type_hints: false, show_parameter_hints: false, show_other_hints: false, + show_background: false, }) }); }); @@ -1800,6 +1793,7 @@ async fn test_inlay_hint_refresh_is_forwarded( show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); }); @@ -1807,7 +1801,7 @@ async fn test_inlay_hint_refresh_is_forwarded( client_a.language_registry().add(rust_lang()); client_b.language_registry().add(rust_lang()); - let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + let mut fake_language_servers = client_a.language_registry().register_fake_lsp( "Rust", FakeLspAdapter { capabilities: lsp::ServerCapabilities { @@ -1838,7 +1832,7 @@ async fn test_inlay_hint_refresh_is_forwarded( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -1966,288 +1960,6 @@ async fn test_inlay_hint_refresh_is_forwarded( }); } -#[gpui::test] -async fn test_multiple_hunk_types_revert(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { - let mut server = TestServer::start(cx_a.executor()).await; - let client_a = server.create_client(cx_a, "user_a").await; - let client_b = server.create_client(cx_b, "user_b").await; - server - .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) - .await; - let active_call_a = cx_a.read(ActiveCall::global); - let active_call_b = 
cx_b.read(ActiveCall::global); - - cx_a.update(editor::init); - cx_b.update(editor::init); - - client_a.language_registry().add(rust_lang()); - client_b.language_registry().add(rust_lang()); - - let base_text = indoc! {r#"struct Row; -struct Row1; -struct Row2; - -struct Row4; -struct Row5; -struct Row6; - -struct Row8; -struct Row9; -struct Row10;"#}; - - client_a - .fs() - .insert_tree( - "/a", - json!({ - "main.rs": base_text, - }), - ) - .await; - let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; - active_call_a - .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx)) - .await - .unwrap(); - let project_id = active_call_a - .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) - .await - .unwrap(); - - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; - active_call_b - .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) - .await - .unwrap(); - - let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); - let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); - - let editor_a = workspace_a - .update(cx_a, |workspace, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, cx) - }) - .await - .unwrap() - .downcast::() - .unwrap(); - - let editor_b = workspace_b - .update(cx_b, |workspace, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, cx) - }) - .await - .unwrap() - .downcast::() - .unwrap(); - - let mut editor_cx_a = EditorTestContext { - cx: cx_a.clone(), - window: cx_a.handle(), - editor: editor_a, - assertion_cx: AssertionContextManager::new(), - }; - let mut editor_cx_b = EditorTestContext { - cx: cx_b.clone(), - window: cx_b.handle(), - editor: editor_b, - assertion_cx: AssertionContextManager::new(), - }; - - // host edits the file, that differs from the base text, producing diff hunks - editor_cx_a.set_state(indoc! {r#"struct Row; - struct Row0.1; - struct Row0.2; - struct Row1; - - struct Row4; - struct Row5444; - struct Row6; - - struct Row9; - struct Row1220;ˇ"#}); - editor_cx_a.update_editor(|editor, cx| { - editor - .buffer() - .read(cx) - .as_singleton() - .unwrap() - .update(cx, |buffer, cx| { - buffer.set_diff_base(Some(base_text.into()), cx); - }); - }); - editor_cx_b.update_editor(|editor, cx| { - editor - .buffer() - .read(cx) - .as_singleton() - .unwrap() - .update(cx, |buffer, cx| { - buffer.set_diff_base(Some(base_text.into()), cx); - }); - }); - cx_a.executor().run_until_parked(); - cx_b.executor().run_until_parked(); - - // the client selects a range in the updated buffer, expands it to see the diff for each hunk in the selection - // the host does not see the diffs toggled - editor_cx_b.set_selections_state(indoc! 
{r#"«ˇstruct Row; - struct Row0.1; - struct Row0.2; - struct Row1; - - struct Row4; - struct Row5444; - struct Row6; - - struct R»ow9; - struct Row1220;"#}); - editor_cx_b - .update_editor(|editor, cx| editor.toggle_hunk_diff(&editor::actions::ToggleHunkDiff, cx)); - cx_a.executor().run_until_parked(); - cx_b.executor().run_until_parked(); - editor_cx_a.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!( - all_hunks, - vec![ - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(1)..DisplayRow(3) - ), - ( - "struct Row2;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(4)..DisplayRow(4) - ), - ( - "struct Row5;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(7) - ), - ( - "struct Row8;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(9)..DisplayRow(9) - ), - ( - "struct Row10;".to_string(), - DiffHunkStatus::Modified, - DisplayRow(10)..DisplayRow(10), - ), - ] - ); - assert_eq!(all_expanded_hunks, Vec::new()); - }); - editor_cx_b.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(1)..=DisplayRow(2), DisplayRow(8)..=DisplayRow(8)], - ); - assert_eq!( - all_hunks, - vec![ - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(1)..DisplayRow(3) - ), - ( - "struct Row2;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(5)..DisplayRow(5) - ), - ( - "struct Row5;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(8)..DisplayRow(9) - ), - ( - "struct Row8;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(12)..DisplayRow(12) - ), - ( - "struct Row10;".to_string(), - DiffHunkStatus::Modified, - DisplayRow(13)..DisplayRow(13), - ), - ] - ); - assert_eq!(all_expanded_hunks, &all_hunks[..all_hunks.len() - 1]); - }); - - // the client reverts the hunks, removing the expanded diffs too - // both host and the client observe the reverted state (with one hunk left, not covered by client's selection) - editor_cx_b.update_editor(|editor, cx| { - editor.revert_selected_hunks(&RevertSelectedHunks, cx); - }); - cx_a.executor().run_until_parked(); - cx_b.executor().run_until_parked(); - editor_cx_a.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!( - all_hunks, - vec![( - "struct Row10;".to_string(), - DiffHunkStatus::Modified, - DisplayRow(10)..DisplayRow(10), - )] - ); - assert_eq!(all_expanded_hunks, Vec::new()); - }); - editor_cx_b.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(5)..=DisplayRow(5)] - ); - assert_eq!( - all_hunks, - vec![( - "struct Row10;".to_string(), - DiffHunkStatus::Modified, - DisplayRow(10)..DisplayRow(10), - )] - ); - assert_eq!(all_expanded_hunks, Vec::new()); - }); - editor_cx_a.assert_editor_state(indoc! 
{r#"struct Row; - struct Row1; - struct Row2; - - struct Row4; - struct Row5; - struct Row6; - - struct Row8; - struct Row9; - struct Row1220;ˇ"#}); - editor_cx_b.assert_editor_state(indoc! {r#"«ˇstruct Row; - struct Row1; - struct Row2; - - struct Row4; - struct Row5; - struct Row6; - - struct Row8; - struct R»ow9; - struct Row1220;"#}); -} - #[gpui::test(iterations = 10)] async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { let mut server = TestServer::start(cx_a.executor()).await; @@ -2334,7 +2046,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA .unwrap(); // Join the project as client B. - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); let editor_b = workspace_b .update(cx_b, |workspace, cx| { diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index e66b66a1b45893..5e9c001491c6ce 100644 --- a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -74,7 +74,7 @@ async fn test_basic_following( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -162,7 +162,7 @@ async fn test_basic_following( executor.run_until_parked(); let active_call_c = cx_c.read(ActiveCall::global); - let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_c = client_c.join_remote_project(project_id, cx_c).await; let (workspace_c, cx_c) = client_c.build_workspace(&project_c, cx_c); active_call_c .update(cx_c, |call, cx| call.set_location(Some(&project_c), cx)) @@ -175,7 +175,7 @@ async fn test_basic_following( cx_d.executor().run_until_parked(); let active_call_d = cx_d.read(ActiveCall::global); - let project_d = client_d.build_dev_server_project(project_id, cx_d).await; + let project_d = client_d.join_remote_project(project_id, cx_d).await; let (workspace_d, cx_d) = client_d.build_workspace(&project_d, cx_d); active_call_d .update(cx_d, |call, cx| call.set_location(Some(&project_d), cx)) @@ -289,7 +289,7 @@ async fn test_basic_following( .get_open_buffer(&(worktree_id, "2.txt").into(), cx) .unwrap() }); - let mut result = MultiBuffer::new(0, Capability::ReadWrite); + let mut result = MultiBuffer::new(Capability::ReadWrite); result.push_excerpts( buffer_a1, [ExcerptRange { @@ -569,7 +569,7 @@ async fn test_following_tab_order( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -686,7 +686,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T .unwrap(); // Client B joins the project. 
- let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -1199,7 +1199,7 @@ async fn test_auto_unfollowing(cx_a: &mut TestAppContext, cx_b: &mut TestAppCont .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -1335,7 +1335,7 @@ async fn test_peers_simultaneously_following_each_other( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); executor.run_until_parked(); @@ -1685,7 +1685,7 @@ async fn test_following_into_excluded_file( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index e012fce8c26794..e124fd6a7e3ffc 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -27,13 +27,14 @@ use language::{ use live_kit_client::MacOSDisplay; use lsp::LanguageServerId; use parking_lot::Mutex; +use project::lsp_store::FormatTarget; use project::{ - search::SearchQuery, search::SearchResult, DiagnosticSummary, FormatTrigger, HoverBlockKind, - Project, ProjectPath, + lsp_store::FormatTrigger, search::SearchQuery, search::SearchResult, DiagnosticSummary, + HoverBlockKind, Project, ProjectPath, }; use rand::prelude::*; use serde_json::json; -use settings::SettingsStore; +use settings::{LocalSettingsKind, SettingsStore}; use std::{ cell::{Cell, RefCell}, env, future, mem, @@ -1372,7 +1373,7 @@ async fn test_unshare_project( .unwrap(); let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap()); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; executor.run_until_parked(); assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer())); @@ -1389,10 +1390,10 @@ async fn test_unshare_project( .unwrap(); executor.run_until_parked(); - assert!(project_b.read_with(cx_b, |project, _| project.is_disconnected())); + assert!(project_b.read_with(cx_b, |project, cx| project.is_disconnected(cx))); // Client C opens the project. - let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_c = client_c.join_remote_project(project_id, cx_c).await; // When client A unshares the project, client C's project becomes read-only. project_a @@ -1402,14 +1403,14 @@ async fn test_unshare_project( assert!(worktree_a.read_with(cx_a, |tree, _| !tree.has_update_observer())); - assert!(project_c.read_with(cx_c, |project, _| project.is_disconnected())); + assert!(project_c.read_with(cx_c, |project, cx| project.is_disconnected(cx))); // Client C can open the project again after client A re-shares. 
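// A minimal sketch of the call-site pattern repeated in the hunks above, assuming the same
// test fixtures: `Project::is_disconnected` now takes the app context, so the `read_with`
// closure binds `cx` instead of discarding it.
project_b.read_with(cx_b, |project, cx| {
    assert!(project.is_disconnected(cx));
});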
let project_id = active_call_a .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_c2 = client_c.build_dev_server_project(project_id, cx_c).await; + let project_c2 = client_c.join_remote_project(project_id, cx_c).await; executor.run_until_parked(); assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer())); @@ -1427,8 +1428,8 @@ async fn test_unshare_project( project_a.read_with(cx_a, |project, _| assert!(!project.is_shared())); - project_c2.read_with(cx_c, |project, _| { - assert!(project.is_disconnected()); + project_c2.read_with(cx_c, |project, cx| { + assert!(project.is_disconnected(cx)); assert!(project.collaborators().is_empty()); }); } @@ -1514,9 +1515,9 @@ async fn test_project_reconnect( .await .unwrap(); - let project_b1 = client_b.build_dev_server_project(project1_id, cx_b).await; - let project_b2 = client_b.build_dev_server_project(project2_id, cx_b).await; - let project_b3 = client_b.build_dev_server_project(project3_id, cx_b).await; + let project_b1 = client_b.join_remote_project(project1_id, cx_b).await; + let project_b2 = client_b.join_remote_project(project2_id, cx_b).await; + let project_b3 = client_b.join_remote_project(project3_id, cx_b).await; executor.run_until_parked(); let worktree1_id = worktree_a1.read_with(cx_a, |worktree, _| { @@ -1560,8 +1561,8 @@ async fn test_project_reconnect( assert_eq!(project.collaborators().len(), 1); }); - project_b1.read_with(cx_b, |project, _| { - assert!(!project.is_disconnected()); + project_b1.read_with(cx_b, |project, cx| { + assert!(!project.is_disconnected(cx)); assert_eq!(project.collaborators().len(), 1); }); @@ -1661,7 +1662,7 @@ async fn test_project_reconnect( }); project_b1.read_with(cx_b, |project, cx| { - assert!(!project.is_disconnected()); + assert!(!project.is_disconnected(cx)); assert_eq!( project .worktree_for_id(worktree1_id, cx) @@ -1695,9 +1696,9 @@ async fn test_project_reconnect( ); }); - project_b2.read_with(cx_b, |project, _| assert!(project.is_disconnected())); + project_b2.read_with(cx_b, |project, cx| assert!(project.is_disconnected(cx))); - project_b3.read_with(cx_b, |project, _| assert!(!project.is_disconnected())); + project_b3.read_with(cx_b, |project, cx| assert!(!project.is_disconnected(cx))); buffer_a1.read_with(cx_a, |buffer, _| assert_eq!(buffer.text(), "WaZ")); @@ -1754,7 +1755,7 @@ async fn test_project_reconnect( executor.run_until_parked(); project_b1.read_with(cx_b, |project, cx| { - assert!(!project.is_disconnected()); + assert!(!project.is_disconnected(cx)); assert_eq!( project .worktree_for_id(worktree1_id, cx) @@ -1788,7 +1789,7 @@ async fn test_project_reconnect( ); }); - project_b3.read_with(cx_b, |project, _| assert!(project.is_disconnected())); + project_b3.read_with(cx_b, |project, cx| assert!(project.is_disconnected(cx))); buffer_a1.read_with(cx_a, |buffer, _| assert_eq!(buffer.text(), "WXaYZ")); @@ -2273,7 +2274,7 @@ async fn test_propagate_saves_and_fs_changes( }, ..Default::default() }, - Some(tree_sitter_rust::language()), + Some(tree_sitter_rust::LANGUAGE.into()), )); let javascript = Arc::new(Language::new( LanguageConfig { @@ -2284,7 +2285,7 @@ async fn test_propagate_saves_and_fs_changes( }, ..Default::default() }, - Some(tree_sitter_rust::language()), + Some(tree_sitter_rust::LANGUAGE.into()), )); for client in [&client_a, &client_b, &client_c] { client.language_registry().add(rust.clone()); @@ -2310,8 +2311,8 @@ async fn test_propagate_saves_and_fs_changes( .unwrap(); // Join that worktree as clients B and C. 
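// A minimal sketch of the grammar-loading change above, assuming the tree-sitter 0.23-style
// grammar crates this diff migrates to: the grammar is now exposed as a `LANGUAGE` constant
// that converts into `tree_sitter::Language`, replacing the old `language()` function.
let mut parser = tree_sitter::Parser::new();
parser
    .set_language(&tree_sitter_rust::LANGUAGE.into())
    .expect("error loading Rust grammar");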
- let project_b = client_b.build_dev_server_project(project_id, cx_b).await; - let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; + let project_c = client_c.join_remote_project(project_id, cx_c).await; let worktree_b = project_b.read_with(cx_b, |p, cx| p.worktrees(cx).next().unwrap()); @@ -2328,11 +2329,11 @@ async fn test_propagate_saves_and_fs_changes( .unwrap(); buffer_b.read_with(cx_b, |buffer, _| { - assert_eq!(&*buffer.language().unwrap().name(), "Rust"); + assert_eq!(buffer.language().unwrap().name(), "Rust".into()); }); buffer_c.read_with(cx_c, |buffer, _| { - assert_eq!(&*buffer.language().unwrap().name(), "Rust"); + assert_eq!(buffer.language().unwrap().name(), "Rust".into()); }); buffer_b.update(cx_b, |buf, cx| buf.edit([(0..0, "i-am-b, ")], None, cx)); buffer_c.update(cx_c, |buf, cx| buf.edit([(0..0, "i-am-c, ")], None, cx)); @@ -2432,17 +2433,17 @@ async fn test_propagate_saves_and_fs_changes( buffer_a.read_with(cx_a, |buffer, _| { assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js")); - assert_eq!(&*buffer.language().unwrap().name(), "JavaScript"); + assert_eq!(buffer.language().unwrap().name(), "JavaScript".into()); }); buffer_b.read_with(cx_b, |buffer, _| { assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js")); - assert_eq!(&*buffer.language().unwrap().name(), "JavaScript"); + assert_eq!(buffer.language().unwrap().name(), "JavaScript".into()); }); buffer_c.read_with(cx_c, |buffer, _| { assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js")); - assert_eq!(&*buffer.language().unwrap().name(), "JavaScript"); + assert_eq!(buffer.language().unwrap().name(), "JavaScript".into()); }); let new_buffer_a = project_a @@ -2535,7 +2536,7 @@ async fn test_git_diff_base_change( .await .unwrap(); - let project_remote = client_b.build_dev_server_project(project_id, cx_b).await; + let project_remote = client_b.join_remote_project(project_id, cx_b).await; let diff_base = " one @@ -2791,7 +2792,7 @@ async fn test_git_branch_name( .await .unwrap(); - let project_remote = client_b.build_dev_server_project(project_id, cx_b).await; + let project_remote = client_b.join_remote_project(project_id, cx_b).await; client_a .fs() .set_branch_name(Path::new("/dir/.git"), Some("branch-1")); @@ -2836,7 +2837,7 @@ async fn test_git_branch_name( assert_branch(Some("branch-2"), project, cx) }); - let project_remote_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_remote_c = client_c.join_remote_project(project_id, cx_c).await; executor.run_until_parked(); project_remote_c.read_with(cx_c, |project, cx| { @@ -2891,7 +2892,7 @@ async fn test_git_status_sync( .await .unwrap(); - let project_remote = client_b.build_dev_server_project(project_id, cx_b).await; + let project_remote = client_b.join_remote_project(project_id, cx_b).await; // Wait for it to catch up to the new status executor.run_until_parked(); @@ -2967,7 +2968,7 @@ async fn test_git_status_sync( }); // And synchronization while joining - let project_remote_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_remote_c = client_c.join_remote_project(project_id, cx_c).await; executor.run_until_parked(); project_remote_c.read_with(cx_c, |project, cx| { @@ -3015,7 +3016,7 @@ async fn test_fs_operations( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let 
project_b = client_b.join_remote_project(project_id, cx_b).await; let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap()); let worktree_b = project_b.read_with(cx_b, |project, cx| project.worktrees(cx).next().unwrap()); @@ -3316,7 +3317,7 @@ async fn test_local_settings( executor.run_until_parked(); // As client B, join that project and observe the local settings. - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let worktree_b = project_b.read_with(cx_b, |project, cx| project.worktrees(cx).next().unwrap()); executor.run_until_parked(); @@ -3327,8 +3328,16 @@ async fn test_local_settings( .local_settings(worktree_b.read(cx).id()) .collect::>(), &[ - (Path::new("").into(), r#"{"tab_size":2}"#.to_string()), - (Path::new("a").into(), r#"{"tab_size":8}"#.to_string()), + ( + Path::new("").into(), + LocalSettingsKind::Settings, + r#"{"tab_size":2}"#.to_string() + ), + ( + Path::new("a").into(), + LocalSettingsKind::Settings, + r#"{"tab_size":8}"#.to_string() + ), ] ) }); @@ -3346,8 +3355,16 @@ async fn test_local_settings( .local_settings(worktree_b.read(cx).id()) .collect::>(), &[ - (Path::new("").into(), r#"{}"#.to_string()), - (Path::new("a").into(), r#"{"tab_size":8}"#.to_string()), + ( + Path::new("").into(), + LocalSettingsKind::Settings, + r#"{}"#.to_string() + ), + ( + Path::new("a").into(), + LocalSettingsKind::Settings, + r#"{"tab_size":8}"#.to_string() + ), ] ) }); @@ -3375,8 +3392,16 @@ async fn test_local_settings( .local_settings(worktree_b.read(cx).id()) .collect::>(), &[ - (Path::new("a").into(), r#"{"tab_size":8}"#.to_string()), - (Path::new("b").into(), r#"{"tab_size":4}"#.to_string()), + ( + Path::new("a").into(), + LocalSettingsKind::Settings, + r#"{"tab_size":8}"#.to_string() + ), + ( + Path::new("b").into(), + LocalSettingsKind::Settings, + r#"{"tab_size":4}"#.to_string() + ), ] ) }); @@ -3406,7 +3431,11 @@ async fn test_local_settings( store .local_settings(worktree_b.read(cx).id()) .collect::>(), - &[(Path::new("a").into(), r#"{"hard_tabs":true}"#.to_string()),] + &[( + Path::new("a").into(), + LocalSettingsKind::Settings, + r#"{"hard_tabs":true}"#.to_string() + ),] ) }); } @@ -3439,7 +3468,7 @@ async fn test_buffer_conflict_after_save( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a buffer as client B let buffer_b = project_b @@ -3503,7 +3532,7 @@ async fn test_buffer_reloading( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a buffer as client B let buffer_b = project_b @@ -3557,7 +3586,7 @@ async fn test_editing_while_guest_opens_buffer( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a buffer as client A let buffer_a = project_a @@ -3605,7 +3634,7 @@ async fn test_leaving_worktree_while_opening_buffer( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, 
cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // See that a guest has joined as client A. executor.run_until_parked(); @@ -3652,7 +3681,7 @@ async fn test_canceling_buffer_opening( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let buffer_a = project_a .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) @@ -3709,8 +3738,8 @@ async fn test_leaving_project( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b1 = client_b.build_dev_server_project(project_id, cx_b).await; - let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_b1 = client_b.join_remote_project(project_id, cx_b).await; + let project_c = client_c.join_remote_project(project_id, cx_c).await; // Client A sees that a guest has joined. executor.run_until_parked(); @@ -3751,7 +3780,7 @@ async fn test_leaving_project( }); // Client B re-joins the project and can open buffers as before. - let project_b2 = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b2 = client_b.join_remote_project(project_id, cx_b).await; executor.run_until_parked(); project_a.read_with(cx_a, |project, _| { @@ -3788,8 +3817,8 @@ async fn test_leaving_project( assert_eq!(project.collaborators().len(), 1); }); - project_b2.read_with(cx_b, |project, _| { - assert!(project.is_disconnected()); + project_b2.read_with(cx_b, |project, cx| { + assert!(project.is_disconnected(cx)); }); project_c.read_with(cx_c, |project, _| { @@ -3821,12 +3850,12 @@ async fn test_leaving_project( assert_eq!(project.collaborators().len(), 0); }); - project_b2.read_with(cx_b, |project, _| { - assert!(project.is_disconnected()); + project_b2.read_with(cx_b, |project, cx| { + assert!(project.is_disconnected(cx)); }); - project_c.read_with(cx_c, |project, _| { - assert!(project.is_disconnected()); + project_c.read_with(cx_c, |project, cx| { + assert!(project.is_disconnected(cx)); }); } @@ -3855,11 +3884,11 @@ async fn test_collaborating_with_diagnostics( }, ..Default::default() }, - Some(tree_sitter_rust::language()), + Some(tree_sitter_rust::LANGUAGE.into()), ))); let mut fake_language_servers = client_a .language_registry() - .register_fake_lsp_adapter("Rust", Default::default()); + .register_fake_lsp("Rust", Default::default()); // Share a project as client A client_a @@ -3927,7 +3956,7 @@ async fn test_collaborating_with_diagnostics( ); // Join the worktree as client B. - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Wait for server to see the diagnostics update. executor.run_until_parked(); @@ -3952,7 +3981,7 @@ async fn test_collaborating_with_diagnostics( }); // Join project as client C and observe the diagnostics. 
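// A minimal sketch of the new assertion shape used in test_local_settings above, assuming the
// same settings-store fixture: `local_settings` now yields (path, LocalSettingsKind, raw JSON)
// triples instead of (path, raw JSON) pairs.
assert_eq!(
    store
        .local_settings(worktree_b.read(cx).id())
        .collect::<Vec<_>>(),
    &[(
        Path::new("a").into(),
        LocalSettingsKind::Settings,
        r#"{"tab_size":8}"#.to_string()
    )]
);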
- let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_c = client_c.join_remote_project(project_id, cx_c).await; executor.run_until_parked(); let project_c_diagnostic_summaries = Rc::new(RefCell::new(project_c.read_with(cx_c, |project, cx| { @@ -4126,7 +4155,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering( .await; client_a.language_registry().add(rust_lang()); - let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + let mut fake_language_servers = client_a.language_registry().register_fake_lsp( "Rust", FakeLspAdapter { disk_based_diagnostics_progress_token: Some("the-disk-based-token".into()), @@ -4160,7 +4189,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering( .unwrap(); // Join the project as client B and open all three files. - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let guest_buffers = futures::future::try_join_all(file_names.iter().map(|file_name| { project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, file_name), cx)) })) @@ -4266,7 +4295,7 @@ async fn test_reloading_buffer_manually( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); @@ -4349,7 +4378,7 @@ async fn test_formatting_buffer( client_a.language_registry().add(rust_lang()); let mut fake_language_servers = client_a .language_registry() - .register_fake_lsp_adapter("Rust", FakeLspAdapter::default()); + .register_fake_lsp("Rust", FakeLspAdapter::default()); // Here we insert a fake tree with a directory that exists on disk. 
This is needed // because later we'll invoke a command, which requires passing a working directory @@ -4364,7 +4393,7 @@ async fn test_formatting_buffer( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); @@ -4389,6 +4418,7 @@ async fn test_formatting_buffer( HashSet::from_iter([buffer_b.clone()]), true, FormatTrigger::Save, + FormatTarget::Buffer, cx, ) }) @@ -4409,7 +4439,7 @@ async fn test_formatting_buffer( file.defaults.formatter = Some(SelectedFormatter::List(FormatterList( vec![Formatter::External { command: "awk".into(), - arguments: vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into(), + arguments: Some(vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into()), }] .into(), ))); @@ -4422,6 +4452,7 @@ async fn test_formatting_buffer( HashSet::from_iter([buffer_b.clone()]), true, FormatTrigger::Save, + FormatTarget::Buffer, cx, ) }) @@ -4458,9 +4489,9 @@ async fn test_prettier_formatting_buffer( }, ..Default::default() }, - Some(tree_sitter_rust::language()), + Some(tree_sitter_rust::LANGUAGE.into()), ))); - let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + let mut fake_language_servers = client_a.language_registry().register_fake_lsp( "TypeScript", FakeLspAdapter { prettier_plugins: vec![test_plugin], @@ -4486,7 +4517,7 @@ async fn test_prettier_formatting_buffer( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx)); let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); @@ -4527,6 +4558,7 @@ async fn test_prettier_formatting_buffer( HashSet::from_iter([buffer_b.clone()]), true, FormatTrigger::Save, + FormatTarget::Buffer, cx, ) }) @@ -4546,6 +4578,7 @@ async fn test_prettier_formatting_buffer( HashSet::from_iter([buffer_a.clone()]), true, FormatTrigger::Manual, + FormatTarget::Buffer, cx, ) }) @@ -4576,7 +4609,7 @@ async fn test_definition( let mut fake_language_servers = client_a .language_registry() - .register_fake_lsp_adapter("Rust", Default::default()); + .register_fake_lsp("Rust", Default::default()); client_a.language_registry().add(rust_lang()); client_a @@ -4599,7 +4632,7 @@ async fn test_definition( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open the file on client B. 
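// A minimal sketch of the settings hunk above, assuming the same language-settings types used
// in these tests: the external formatter's `arguments` field is now optional, so the argument
// list is wrapped in `Some(...)`; the format calls in the same test additionally pass
// `FormatTarget::Buffer` alongside the existing `FormatTrigger`.
file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
    vec![Formatter::External {
        command: "awk".into(),
        arguments: Some(vec!["{buffer_path}".to_string()].into()),
    }]
    .into(),
)));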
let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); @@ -4712,7 +4745,7 @@ async fn test_references( let active_call_a = cx_a.read(ActiveCall::global); client_a.language_registry().add(rust_lang()); - let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( + let mut fake_language_servers = client_a.language_registry().register_fake_lsp( "Rust", FakeLspAdapter { name: "my-fake-lsp-adapter", @@ -4744,7 +4777,7 @@ async fn test_references( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open the file on client B. let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "one.rs"), cx)); @@ -4901,7 +4934,7 @@ async fn test_project_search( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Perform a search as the guest. let mut results = HashMap::default(); @@ -4983,7 +5016,7 @@ async fn test_document_highlights( let mut fake_language_servers = client_a .language_registry() - .register_fake_lsp_adapter("Rust", Default::default()); + .register_fake_lsp("Rust", Default::default()); client_a.language_registry().add(rust_lang()); let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await; @@ -4991,7 +5024,7 @@ async fn test_document_highlights( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open the file on client B. 
let open_b = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)); @@ -5079,35 +5112,37 @@ async fn test_lsp_hover( client_a.language_registry().add(rust_lang()); let language_server_names = ["rust-analyzer", "CrabLang-ls"]; - let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter( - "Rust", - FakeLspAdapter { - name: "rust-analyzer", - capabilities: lsp::ServerCapabilities { - hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), - ..lsp::ServerCapabilities::default() + let mut language_servers = [ + client_a.language_registry().register_fake_lsp( + "Rust", + FakeLspAdapter { + name: "rust-analyzer", + capabilities: lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + ..lsp::ServerCapabilities::default() + }, + ..FakeLspAdapter::default() }, - ..FakeLspAdapter::default() - }, - ); - let _other_server = client_a.language_registry().register_fake_lsp_adapter( - "Rust", - FakeLspAdapter { - name: "CrabLang-ls", - capabilities: lsp::ServerCapabilities { - hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), - ..lsp::ServerCapabilities::default() + ), + client_a.language_registry().register_fake_lsp( + "Rust", + FakeLspAdapter { + name: "CrabLang-ls", + capabilities: lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + ..lsp::ServerCapabilities::default() + }, + ..FakeLspAdapter::default() }, - ..FakeLspAdapter::default() - }, - ); + ), + ]; let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await; let project_id = active_call_a .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open the file as the guest let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)); @@ -5115,7 +5150,7 @@ async fn test_lsp_hover( let mut servers_with_hover_requests = HashMap::default(); for i in 0..language_server_names.len() { - let new_server = fake_language_servers.next().await.unwrap_or_else(|| { + let new_server = language_servers[i].next().await.unwrap_or_else(|| { panic!( "Failed to get language server #{i} with name {}", &language_server_names[i] @@ -5260,7 +5295,7 @@ async fn test_project_symbols( client_a.language_registry().add(rust_lang()); let mut fake_language_servers = client_a .language_registry() - .register_fake_lsp_adapter("Rust", Default::default()); + .register_fake_lsp("Rust", Default::default()); client_a .fs() @@ -5284,7 +5319,7 @@ async fn test_project_symbols( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Cause the language server to start. 
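// A minimal sketch of the pattern introduced in test_lsp_hover above, assuming the same test
// fixtures and `futures::StreamExt` in scope: each `register_fake_lsp` call (the rename of
// `register_fake_lsp_adapter`) returns its own stream of fake servers, so the test keeps one
// stream per registered adapter and awaits the next server from each independently.
let mut language_servers = [
    client_a
        .language_registry()
        .register_fake_lsp("Rust", FakeLspAdapter::default()),
    client_a
        .language_registry()
        .register_fake_lsp("Rust", FakeLspAdapter::default()),
];
for server_stream in &mut language_servers {
    let _fake_server = server_stream.next().await.unwrap();
}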
let open_buffer_task = @@ -5362,7 +5397,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it( client_a.language_registry().add(rust_lang()); let mut fake_language_servers = client_a .language_registry() - .register_fake_lsp_adapter("Rust", Default::default()); + .register_fake_lsp("Rust", Default::default()); client_a .fs() @@ -5379,7 +5414,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let open_buffer_task = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); let buffer_b1 = cx_b.executor().spawn(open_buffer_task).await.unwrap(); @@ -6468,7 +6503,7 @@ async fn test_context_collaboration_with_reconnect( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Client A sees that a guest has joined. executor.run_until_parked(); diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index 2324f03cd423eb..47f6a38073175b 100644 --- a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -298,8 +298,7 @@ impl RandomizedTest for ProjectCollaborationTest { continue; }; let project_root_name = root_name_for_project(&project, cx); - let is_local = - project.read_with(cx, |project, _| project.is_local_or_ssh()); + let is_local = project.read_with(cx, |project, _| project.is_local()); let worktree = project.read_with(cx, |project, cx| { project .worktrees(cx) @@ -335,7 +334,7 @@ impl RandomizedTest for ProjectCollaborationTest { continue; }; let project_root_name = root_name_for_project(&project, cx); - let is_local = project.read_with(cx, |project, _| project.is_local_or_ssh()); + let is_local = project.read_with(cx, |project, _| project.is_local()); match rng.gen_range(0..100_u32) { // Manipulate an existing buffer @@ -1047,7 +1046,7 @@ impl RandomizedTest for ProjectCollaborationTest { }, None, ))); - client.language_registry().register_fake_lsp_adapter( + client.language_registry().register_fake_lsp( "Rust", FakeLspAdapter { name: "the-fake-language-server", @@ -1169,7 +1168,7 @@ impl RandomizedTest for ProjectCollaborationTest { Some((project, cx)) }); - if !guest_project.is_disconnected() { + if !guest_project.is_disconnected(cx) { if let Some((host_project, host_cx)) = host_project { let host_worktree_snapshots = host_project.read_with(host_cx, |host_project, cx| { @@ -1255,8 +1254,8 @@ impl RandomizedTest for ProjectCollaborationTest { let buffers = client.buffers().clone(); for (guest_project, guest_buffers) in &buffers { - let project_id = if guest_project.read_with(client_cx, |project, _| { - project.is_local_or_ssh() || project.is_disconnected() + let project_id = if guest_project.read_with(client_cx, |project, cx| { + project.is_local() || project.is_disconnected(cx) }) { continue; } else { @@ -1560,9 +1559,7 @@ async fn ensure_project_shared( let first_root_name = root_name_for_project(project, cx); let active_call = cx.read(ActiveCall::global); if active_call.read_with(cx, |call, _| call.room().is_some()) - && project.read_with(cx, |project, _| { - 
project.is_local_or_ssh() && !project.is_shared() - }) + && project.read_with(cx, |project, _| project.is_local() && !project.is_shared()) { match active_call .update(cx, |call, cx| call.share_project(project.clone(), cx)) diff --git a/crates/collab/src/tests/randomized_test_helpers.rs b/crates/collab/src/tests/randomized_test_helpers.rs index c788dd28e0e526..7bf1034ceaab88 100644 --- a/crates/collab/src/tests/randomized_test_helpers.rs +++ b/crates/collab/src/tests/randomized_test_helpers.rs @@ -532,9 +532,9 @@ impl TestPlan { server.allow_connections(); for project in client.dev_server_projects().iter() { - project.read_with(&client_cx, |project, _| { + project.read_with(&client_cx, |project, cx| { assert!( - project.is_disconnected(), + project.is_disconnected(cx), "project {:?} should be read only", project.remote_id() ) diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index 21e7f9dd9e87f6..dae33457555ec4 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -2,13 +2,16 @@ use crate::tests::TestServer; use call::ActiveCall; use fs::{FakeFs, Fs as _}; use gpui::{Context as _, TestAppContext}; -use language::language_settings::all_language_settings; -use remote::SshSession; -use remote_server::HeadlessProject; +use http_client::BlockedHttpClient; +use language::{language_settings::all_language_settings, LanguageRegistry}; +use node_runtime::NodeRuntime; +use project::ProjectPath; +use remote::SshRemoteClient; +use remote_server::{HeadlessAppState, HeadlessProject}; use serde_json::json; use std::{path::Path, sync::Arc}; -#[gpui::test] +#[gpui::test(iterations = 10)] async fn test_sharing_an_ssh_remote_project( cx_a: &mut TestAppContext, cx_b: &mut TestAppContext, @@ -23,7 +26,7 @@ async fn test_sharing_an_ssh_remote_project( .await; // Set up project on remote FS - let (client_ssh, server_ssh) = SshSession::fake(cx_a, server_cx); + let (client_ssh, server_ssh) = SshRemoteClient::fake(cx_a, server_cx); let remote_fs = FakeFs::new(server_cx.executor()); remote_fs .insert_tree( @@ -47,14 +50,28 @@ async fn test_sharing_an_ssh_remote_project( // User A connects to the remote project via SSH. server_cx.update(HeadlessProject::init); - let _headless_project = - server_cx.new_model(|cx| HeadlessProject::new(server_ssh, remote_fs.clone(), cx)); + let remote_http_client = Arc::new(BlockedHttpClient); + let node = NodeRuntime::unavailable(); + let languages = Arc::new(LanguageRegistry::new(server_cx.executor())); + let _headless_project = server_cx.new_model(|cx| { + client::init_settings(cx); + HeadlessProject::new( + HeadlessAppState { + session: server_ssh, + fs: remote_fs.clone(), + http_client: remote_http_client, + node_runtime: node, + languages, + }, + cx, + ) + }); let (project_a, worktree_id) = client_a .build_ssh_project("/code/project1", client_ssh, cx_a) .await; - // User A shares the remote project. + // While the SSH worktree is being scanned, user A shares the remote project. let active_call_a = cx_a.read(ActiveCall::global); let project_id = active_call_a .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) @@ -62,12 +79,30 @@ async fn test_sharing_an_ssh_remote_project( .unwrap(); // User B joins the project. 
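// A minimal sketch consolidating the headless-server wiring above, assuming the same test
// imports (BlockedHttpClient, NodeRuntime, LanguageRegistry, HeadlessAppState, HeadlessProject):
// `HeadlessProject::new` now takes a bundled `HeadlessAppState` rather than separate
// session/fs arguments.
let http_client = Arc::new(BlockedHttpClient);
let node_runtime = NodeRuntime::unavailable();
let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
let _headless_project = server_cx.new_model(|cx| {
    HeadlessProject::new(
        HeadlessAppState {
            session: server_ssh,
            fs: remote_fs.clone(),
            http_client,
            node_runtime,
            languages,
        },
        cx,
    )
});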
- let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let worktree_b = project_b .update(cx_b, |project, cx| project.worktree_for_id(worktree_id, cx)) .unwrap(); + let worktree_a = project_a + .update(cx_a, |project, cx| project.worktree_for_id(worktree_id, cx)) + .unwrap(); + executor.run_until_parked(); + + worktree_a.update(cx_a, |worktree, _cx| { + assert_eq!( + worktree.paths().map(Arc::as_ref).collect::>(), + vec![ + Path::new(".zed"), + Path::new(".zed/settings.json"), + Path::new("README.md"), + Path::new("src"), + Path::new("src/lib.rs"), + ] + ); + }); + worktree_b.update(cx_b, |worktree, _cx| { assert_eq!( worktree.paths().map(Arc::as_ref).collect::>(), @@ -100,21 +135,43 @@ async fn test_sharing_an_ssh_remote_project( let file = buffer_b.read(cx).file(); assert_eq!( all_language_settings(file, cx) - .language(Some("Rust")) + .language(Some(&("Rust".into()))) .language_servers, - ["override-rust-analyzer".into()] + ["override-rust-analyzer".to_string()] ) }); project_b - .update(cx_b, |project, cx| project.save_buffer(buffer_b, cx)) + .update(cx_b, |project, cx| { + project.save_buffer_as( + buffer_b.clone(), + ProjectPath { + worktree_id: worktree_id.to_owned(), + path: Arc::from(Path::new("src/renamed.rs")), + }, + cx, + ) + }) .await .unwrap(); assert_eq!( remote_fs - .load("/code/project1/src/lib.rs".as_ref()) + .load("/code/project1/src/renamed.rs".as_ref()) .await .unwrap(), "fn one() -> usize { 100 }" ); + cx_b.run_until_parked(); + cx_b.update(|cx| { + assert_eq!( + buffer_b + .read(cx) + .file() + .unwrap() + .path() + .to_string_lossy() + .to_string(), + "src/renamed.rs".to_string() + ); + }); } diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs index e691afceda7e08..210a049e0bcb30 100644 --- a/crates/collab/src/tests/test_server.rs +++ b/crates/collab/src/tests/test_server.rs @@ -21,11 +21,11 @@ use git::GitHostingProviderRegistry; use gpui::{BackgroundExecutor, Context, Model, Task, TestAppContext, View, VisualTestContext}; use http_client::FakeHttpClient; use language::LanguageRegistry; -use node_runtime::FakeNodeRuntime; +use node_runtime::NodeRuntime; use notifications::NotificationStore; use parking_lot::Mutex; use project::{Project, WorktreeId}; -use remote::SshSession; +use remote::SshRemoteClient; use rpc::{ proto::{self, ChannelRole}, RECEIVE_TIMEOUT, @@ -278,7 +278,7 @@ impl TestServer { languages: language_registry, fs: fs.clone(), build_window_options: |_, _| Default::default(), - node_runtime: FakeNodeRuntime::new(), + node_runtime: NodeRuntime::unavailable(), session, }); @@ -408,7 +408,7 @@ impl TestServer { languages: language_registry, fs: fs.clone(), build_window_options: |_, _| Default::default(), - node_runtime: FakeNodeRuntime::new(), + node_runtime: NodeRuntime::unavailable(), session, }); @@ -635,9 +635,11 @@ impl TestServer { ) -> Arc { Arc::new(AppState { db: test_db.db().clone(), + llm_db: None, live_kit_client: Some(Arc::new(live_kit_test_server.create_api_client())), blob_store_client: None, stripe_client: None, + stripe_billing: None, rate_limiter: Arc::new(RateLimiter::new(test_db.db().clone())), executor, clickhouse_client: None, @@ -677,10 +679,7 @@ impl TestServer { migrations_path: None, seed_path: None, stripe_api_key: None, - stripe_price_id: None, supermaven_admin_api_key: None, - qwen2_7b_api_key: None, - qwen2_7b_api_url: None, user_backfiller_github_access_token: None, }, }) @@ -837,7 +836,7 @@ 
impl TestClient { pub async fn build_ssh_project( &self, root_path: impl AsRef, - ssh: Arc, + ssh: Model, cx: &mut TestAppContext, ) -> (Model, WorktreeId) { let project = cx.update(|cx| { @@ -921,7 +920,7 @@ impl TestClient { }) } - pub async fn build_dev_server_project( + pub async fn join_remote_project( &self, host_project_id: u64, guest_cx: &mut TestAppContext, diff --git a/crates/collab_ui/src/chat_panel/message_editor.rs b/crates/collab_ui/src/chat_panel/message_editor.rs index 028e148cbac039..7023a8d07ed76c 100644 --- a/crates/collab_ui/src/chat_panel/message_editor.rs +++ b/crates/collab_ui/src/chat_panel/message_editor.rs @@ -111,7 +111,7 @@ impl MessageEditor { editor.set_show_gutter(false, cx); editor.set_show_wrap_guides(false, cx); editor.set_show_indent_guides(false, cx); - editor.set_completion_provider(Box::new(MessageEditorCompletionProvider(this))); + editor.set_completion_provider(Some(Box::new(MessageEditorCompletionProvider(this)))); editor.set_auto_replace_emoji_shortcode( MessageEditorSettings::get_global(cx) .auto_replace_emoji_shortcode @@ -228,10 +228,10 @@ impl MessageEditor { fn on_buffer_event( &mut self, buffer: Model, - event: &language::Event, + event: &language::BufferEvent, cx: &mut ViewContext, ) { - if let language::Event::Reparsed | language::Event::Edited = event { + if let language::BufferEvent::Reparsed | language::BufferEvent::Edited = event { let buffer = buffer.read(cx).snapshot(); self.mentions_task = Some(cx.spawn(|this, cx| async move { cx.background_executor() diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 72701101816995..59f83e06548a6b 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -2831,7 +2831,7 @@ impl Panel for CollabPanel { fn icon(&self, cx: &gpui::WindowContext) -> Option { CollaborationPanelSettings::get_global(cx) .button - .then_some(ui::IconName::Collab) + .then_some(ui::IconName::UserGroup) } fn icon_tooltip(&self, _cx: &WindowContext) -> Option<&'static str> { diff --git a/crates/context_servers/Cargo.toml b/crates/context_servers/Cargo.toml index 21bf6a1fc86166..9c0336f1217ef0 100644 --- a/crates/context_servers/Cargo.toml +++ b/crates/context_servers/Cargo.toml @@ -14,6 +14,7 @@ path = "src/context_servers.rs" [dependencies] anyhow.workspace = true collections.workspace = true +command_palette_hooks.workspace = true futures.workspace = true gpui.workspace = true log.workspace = true diff --git a/crates/context_servers/src/client.rs b/crates/context_servers/src/client.rs index aff186b115672b..6681023c008711 100644 --- a/crates/context_servers/src/client.rs +++ b/crates/context_servers/src/client.rs @@ -26,7 +26,7 @@ const JSON_RPC_VERSION: &str = "2.0"; const REQUEST_TIMEOUT: Duration = Duration::from_secs(60); type ResponseHandler = Box)>; -type NotificationHandler = Box; +type NotificationHandler = Box; #[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)] #[serde(untagged)] @@ -94,7 +94,6 @@ enum CspResult { #[derive(Serialize, Deserialize)] struct Notification<'a, T> { jsonrpc: &'static str, - id: RequestId, #[serde(borrow)] method: &'a str, params: T, @@ -103,7 +102,6 @@ struct Notification<'a, T> { #[derive(Debug, Clone, Deserialize)] struct AnyNotification<'a> { jsonrpc: &'a str, - id: RequestId, method: String, #[serde(default)] params: Option, @@ -246,11 +244,7 @@ impl Client { if let Some(handler) = notification_handlers.get_mut(notification.method.as_str()) { - handler( - notification.id, - 
notification.params.unwrap_or(Value::Null), - cx.clone(), - ); + handler(notification.params.unwrap_or(Value::Null), cx.clone()); } } } @@ -378,10 +372,8 @@ impl Client { /// Sends a notification to the context server without expecting a response. /// This function serializes the notification and sends it through the outbound channel. pub fn notify(&self, method: &str, params: impl Serialize) -> Result<()> { - let id = self.next_id.fetch_add(1, SeqCst); let notification = serde_json::to_string(&Notification { jsonrpc: JSON_RPC_VERSION, - id: RequestId::Int(id), method, params, }) @@ -390,13 +382,13 @@ impl Client { Ok(()) } - pub fn on_notification(&self, method: &'static str, mut f: F) + pub fn on_notification(&self, method: &'static str, f: F) where F: 'static + Send + FnMut(Value, AsyncAppContext), { self.notification_handlers .lock() - .insert(method, Box::new(move |_, params, cx| f(params, cx))); + .insert(method, Box::new(f)); } pub fn name(&self) -> &str { diff --git a/crates/context_servers/src/context_servers.rs b/crates/context_servers/src/context_servers.rs index 3333f95f9fe7aa..55634bb77cc944 100644 --- a/crates/context_servers/src/context_servers.rs +++ b/crates/context_servers/src/context_servers.rs @@ -12,6 +12,9 @@ pub use registry::*; actions!(context_servers, [Restart]); +/// The namespace for the context servers actions. +const CONTEXT_SERVERS_NAMESPACE: &'static str = "context_servers"; + pub fn init(cx: &mut AppContext) { log::info!("initializing context server client"); manager::init(cx); diff --git a/crates/context_servers/src/manager.rs b/crates/context_servers/src/manager.rs index 1596a54eb9bfc6..3c21fd53fb5824 100644 --- a/crates/context_servers/src/manager.rs +++ b/crates/context_servers/src/manager.rs @@ -15,6 +15,7 @@ //! and react to changes in settings. 
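// A minimal, self-contained sketch (using serde/serde_json) of why the `id` field is removed
// from `Notification` in client.rs above: per JSON-RPC 2.0, a notification is a request object
// without an "id" member, so no response can be correlated with it and none is expected.
#[derive(serde::Serialize)]
struct Notification<'a, T> {
    jsonrpc: &'static str,
    method: &'a str,
    params: T,
}

fn main() {
    let wire = serde_json::to_string(&Notification {
        jsonrpc: "2.0",
        method: "notifications/initialized",
        params: serde_json::json!({}),
    })
    .unwrap();
    assert_eq!(
        wire,
        r#"{"jsonrpc":"2.0","method":"notifications/initialized","params":{}}"#
    );
}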
use collections::{HashMap, HashSet}; +use command_palette_hooks::CommandPaletteFilter; use gpui::{AppContext, AsyncAppContext, Context, EventEmitter, Global, Model, ModelContext, Task}; use log; use parking_lot::RwLock; @@ -24,6 +25,7 @@ use settings::{Settings, SettingsSources, SettingsStore}; use std::path::Path; use std::sync::Arc; +use crate::CONTEXT_SERVERS_NAMESPACE; use crate::{ client::{self, Client}, types, @@ -83,7 +85,7 @@ impl ContextServer { )?; let protocol = crate::protocol::ModelContextProtocol::new(client); - let client_info = types::EntityInfo { + let client_info = types::Implementation { name: "Zed".to_string(), version: env!("CARGO_PKG_VERSION").to_string(), }; @@ -148,26 +150,28 @@ impl ContextServerManager { cx: &mut ModelContext, ) -> Task> { let server_id = config.id.clone(); - let server_id2 = config.id.clone(); if self.servers.contains_key(&server_id) || self.pending_servers.contains(&server_id) { return Task::ready(Ok(())); } - let task = cx.spawn(|this, mut cx| async move { - let server = Arc::new(ContextServer::new(config)); - server.start(&cx).await?; - this.update(&mut cx, |this, cx| { - this.servers.insert(server_id.clone(), server); - this.pending_servers.remove(&server_id); - cx.emit(Event::ServerStarted { - server_id: server_id.clone(), - }); - })?; - Ok(()) - }); + let task = { + let server_id = server_id.clone(); + cx.spawn(|this, mut cx| async move { + let server = Arc::new(ContextServer::new(config)); + server.start(&cx).await?; + this.update(&mut cx, |this, cx| { + this.servers.insert(server_id.clone(), server); + this.pending_servers.remove(&server_id); + cx.emit(Event::ServerStarted { + server_id: server_id.clone(), + }); + })?; + Ok(()) + }) + }; - self.pending_servers.insert(server_id2); + self.pending_servers.insert(server_id); task } @@ -243,15 +247,20 @@ impl GlobalContextServerManager { pub fn init(cx: &mut AppContext) { ContextServerSettings::register(cx); GlobalContextServerManager::register(cx); + + CommandPaletteFilter::update_global(cx, |filter, _cx| { + filter.hide_namespace(CONTEXT_SERVERS_NAMESPACE); + }); + cx.observe_global::(|cx| { let manager = ContextServerManager::global(cx); cx.update_model(&manager, |manager, cx| { let settings = ContextServerSettings::get_global(cx); - let current_servers: HashMap = manager + let current_servers = manager .servers() .into_iter() .map(|server| (server.id.clone(), server.config.clone())) - .collect(); + .collect::>(); let new_servers = settings .servers @@ -279,6 +288,15 @@ pub fn init(cx: &mut AppContext) { for id in servers_to_remove { manager.remove_server(&id, cx).detach_and_log_err(cx); } + + let has_any_context_servers = !manager.servers().is_empty(); + CommandPaletteFilter::update_global(cx, |filter, _cx| { + if has_any_context_servers { + filter.show_namespace(CONTEXT_SERVERS_NAMESPACE); + } else { + filter.hide_namespace(CONTEXT_SERVERS_NAMESPACE); + } + }); }) }) .detach(); diff --git a/crates/context_servers/src/protocol.rs b/crates/context_servers/src/protocol.rs index 87da217f7d1936..451db56ef31df0 100644 --- a/crates/context_servers/src/protocol.rs +++ b/crates/context_servers/src/protocol.rs @@ -11,8 +11,6 @@ use collections::HashMap; use crate::client::Client; use crate::types; -pub use types::PromptInfo; - const PROTOCOL_VERSION: u32 = 1; pub struct ModelContextProtocol { @@ -26,7 +24,7 @@ impl ModelContextProtocol { pub async fn initialize( self, - client_info: types::EntityInfo, + client_info: types::Implementation, ) -> Result { let params = types::InitializeParams { 
protocol_version: PROTOCOL_VERSION, @@ -96,7 +94,7 @@ impl InitializedContextServerProtocol { } /// List the MCP prompts. - pub async fn list_prompts(&self) -> Result> { + pub async fn list_prompts(&self) -> Result> { self.check_capability(ServerCapability::Prompts)?; let response: types::PromptsListResponse = self @@ -107,6 +105,18 @@ impl InitializedContextServerProtocol { Ok(response.prompts) } + /// List the MCP resources. + pub async fn list_resources(&self) -> Result { + self.check_capability(ServerCapability::Resources)?; + + let response: types::ResourcesListResponse = self + .inner + .request(types::RequestType::ResourcesList.as_str(), ()) + .await?; + + Ok(response) + } + /// Executes a prompt with the given arguments and returns the result. pub async fn run_prompt>( &self, diff --git a/crates/context_servers/src/types.rs b/crates/context_servers/src/types.rs index c0e9a79f1589c5..04ac87c704d06c 100644 --- a/crates/context_servers/src/types.rs +++ b/crates/context_servers/src/types.rs @@ -15,6 +15,7 @@ pub enum RequestType { PromptsGet, PromptsList, CompletionComplete, + Ping, } impl RequestType { @@ -30,6 +31,7 @@ impl RequestType { RequestType::PromptsGet => "prompts/get", RequestType::PromptsList => "prompts/list", RequestType::CompletionComplete => "completion/complete", + RequestType::Ping => "ping", } } } @@ -39,14 +41,15 @@ impl RequestType { pub struct InitializeParams { pub protocol_version: u32, pub capabilities: ClientCapabilities, - pub client_info: EntityInfo, + pub client_info: Implementation, } #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] pub struct CallToolParams { pub name: String, - pub arguments: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub arguments: Option>, } #[derive(Debug, Serialize)] @@ -77,6 +80,7 @@ pub struct LoggingSetLevelParams { #[serde(rename_all = "camelCase")] pub struct PromptsGetParams { pub name: String, + #[serde(skip_serializing_if = "Option::is_none")] pub arguments: Option>, } @@ -101,6 +105,13 @@ pub struct PromptReference { pub name: String, } +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ResourceReference { + pub r#type: PromptReferenceType, + pub uri: Url, +} + #[derive(Debug, Serialize)] #[serde(rename_all = "snake_case")] pub enum PromptReferenceType { @@ -110,13 +121,6 @@ pub enum PromptReferenceType { Resource, } -#[derive(Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct ResourceReference { - pub r#type: String, - pub uri: String, -} - #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] pub struct CompletionArgument { @@ -129,7 +133,7 @@ pub struct CompletionArgument { pub struct InitializeResponse { pub protocol_version: u32, pub capabilities: ServerCapabilities, - pub server_info: EntityInfo, + pub server_info: Implementation, } #[derive(Debug, Deserialize)] @@ -141,13 +145,39 @@ pub struct ResourcesReadResponse { #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ResourcesListResponse { + #[serde(skip_serializing_if = "Option::is_none")] pub resource_templates: Option>, - pub resources: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub resources: Option>, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SamplingMessage { + pub role: SamplingRole, + pub content: SamplingContent, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum SamplingRole { + User, + Assistant, +} + +#[derive(Debug, Serialize, 
Deserialize)] +#[serde(tag = "type")] +pub enum SamplingContent { + #[serde(rename = "text")] + Text { text: String }, + #[serde(rename = "image")] + Image { data: String, mime_type: String }, } #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] pub struct PromptsGetResponse { + #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, pub prompt: String, } @@ -155,7 +185,7 @@ pub struct PromptsGetResponse { #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] pub struct PromptsListResponse { - pub prompts: Vec, + pub prompts: Vec, } #[derive(Debug, Deserialize)] @@ -168,61 +198,91 @@ pub struct CompletionCompleteResponse { #[serde(rename_all = "camelCase")] pub struct CompletionResult { pub values: Vec, + #[serde(skip_serializing_if = "Option::is_none")] pub total: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub has_more: Option, } -#[derive(Debug, Deserialize, Clone)] +#[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] -pub struct PromptInfo { +pub struct Prompt { pub name: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub arguments: Option>, } -#[derive(Debug, Deserialize, Clone)] +#[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct PromptArgument { pub name: String, + #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub required: Option, } -// Shared Types - #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ClientCapabilities { + #[serde(skip_serializing_if = "Option::is_none")] pub experimental: Option>, - pub sampling: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub sampling: Option, } #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ServerCapabilities { + #[serde(skip_serializing_if = "Option::is_none")] pub experimental: Option>, - pub logging: Option>, - pub prompts: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub logging: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub prompts: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub resources: Option, - pub tools: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub tools: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PromptsCapabilities { + #[serde(skip_serializing_if = "Option::is_none")] + pub list_changed: Option, } #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ResourcesCapabilities { + #[serde(skip_serializing_if = "Option::is_none")] pub subscribe: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub list_changed: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ToolsCapabilities { + #[serde(skip_serializing_if = "Option::is_none")] + pub list_changed: Option, } #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct Tool { pub name: String, + #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, pub input_schema: serde_json::Value, } #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] -pub struct EntityInfo { +pub struct Implementation { pub name: String, pub version: String, } @@ -231,6 +291,10 @@ pub struct EntityInfo { #[serde(rename_all = 
"camelCase")] pub struct Resource { pub uri: Url, + pub name: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub mime_type: Option, } @@ -238,18 +302,23 @@ pub struct Resource { #[serde(rename_all = "camelCase")] pub struct ResourceContent { pub uri: Url, + #[serde(skip_serializing_if = "Option::is_none")] pub mime_type: Option, - pub content_type: String, + #[serde(skip_serializing_if = "Option::is_none")] pub text: Option, - pub data: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub blob: Option, } #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ResourceTemplate { pub uri_template: String, - pub name: Option, + pub name: String, + #[serde(skip_serializing_if = "Option::is_none")] pub description: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub mime_type: Option, } #[derive(Debug, Serialize, Deserialize)] @@ -261,13 +330,16 @@ pub enum LoggingLevel { Error, } -// Client Notifications - #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] pub enum NotificationType { Initialized, Progress, + Message, + ResourcesUpdated, + ResourcesListChanged, + ToolsListChanged, + PromptsListChanged, } impl NotificationType { @@ -275,6 +347,11 @@ impl NotificationType { match self { NotificationType::Initialized => "notifications/initialized", NotificationType::Progress => "notifications/progress", + NotificationType::Message => "notifications/message", + NotificationType::ResourcesUpdated => "notifications/resources/updated", + NotificationType::ResourcesListChanged => "notifications/resources/list_changed", + NotificationType::ToolsListChanged => "notifications/tools/list_changed", + NotificationType::PromptsListChanged => "notifications/prompts/list_changed", } } } @@ -289,12 +366,13 @@ pub enum ClientNotification { #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] pub struct ProgressParams { - pub progress_token: String, + pub progress_token: ProgressToken, pub progress: f64, + #[serde(skip_serializing_if = "Option::is_none")] pub total: Option, } -// Helper Types that don't map directly to the protocol +pub type ProgressToken = String; pub enum CompletionTotal { Exact(u32), diff --git a/crates/copilot/Cargo.toml b/crates/copilot/Cargo.toml index 54abbaa112060b..2a54497562a243 100644 --- a/crates/copilot/Cargo.toml +++ b/crates/copilot/Cargo.toml @@ -37,7 +37,6 @@ fs.workspace = true futures.workspace = true gpui.workspace = true http_client.workspace = true -isahc.workspace = true language.workspace = true lsp.workspace = true menu.workspace = true diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index 5fc86480e6577a..a1fd7a9bb96683 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -57,7 +57,7 @@ pub fn init( new_server_id: LanguageServerId, fs: Arc, http: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, cx: &mut AppContext, ) { copilot_chat::init(fs, http.clone(), cx); @@ -302,7 +302,7 @@ pub struct Completion { pub struct Copilot { http: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, server: CopilotServer, buffers: HashSet>, server_id: LanguageServerId, @@ -334,7 +334,7 @@ impl Copilot { fn start( new_server_id: LanguageServerId, http: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, cx: &mut ModelContext, ) -> Self { let mut this = Self { @@ -392,7 +392,7 @@ impl Copilot { #[cfg(any(test, feature = "test-support"))] pub fn fake(cx: &mut 
gpui::TestAppContext) -> (Model<Self>, lsp::FakeLanguageServer) { use lsp::FakeLanguageServer; - use node_runtime::FakeNodeRuntime; + use node_runtime::NodeRuntime; let (server, fake_server) = FakeLanguageServer::new( LanguageServerId(0), @@ -406,7 +406,7 @@ impl Copilot { cx.to_async(), ); let http = http_client::FakeHttpClient::create(|_| async { unreachable!() }); - let node_runtime = FakeNodeRuntime::new(); + let node_runtime = NodeRuntime::unavailable(); let this = cx.new_model(|cx| Self { server_id: LanguageServerId(0), http: http.clone(), @@ -425,7 +425,7 @@ impl Copilot { async fn start_language_server( new_server_id: LanguageServerId, http: Arc<dyn HttpClient>, - node_runtime: Arc<NodeRuntime>, + node_runtime: NodeRuntime, this: WeakModel<Self>, mut cx: AsyncAppContext, ) { @@ -691,17 +691,17 @@ impl Copilot { fn handle_buffer_event( &mut self, buffer: Model<Buffer>, - event: &language::Event, + event: &language::BufferEvent, cx: &mut ModelContext<Self>, ) -> Result<()> { if let Ok(server) = self.server.as_running() { if let Some(registered_buffer) = server.registered_buffers.get_mut(&buffer.entity_id()) { match event { - language::Event::Edited => { + language::BufferEvent::Edited => { drop(registered_buffer.report_changes(&buffer, cx)); } - language::Event::Saved => { + language::BufferEvent::Saved => { server .lsp .notify::<lsp::notification::DidSaveTextDocument>( @@ -713,7 +713,8 @@ impl Copilot { }, )?; } - language::Event::FileHandleChanged | language::Event::LanguageChanged => { + language::BufferEvent::FileHandleChanged + | language::BufferEvent::LanguageChanged => { let new_language_id = id_for_language(buffer.read(cx).language()); let new_uri = uri_for_buffer(&buffer, cx); if new_uri != registered_buffer.uri diff --git a/crates/copilot/src/copilot_chat.rs b/crates/copilot/src/copilot_chat.rs index 5d80c89a6649dd..c5ba1bfc6a5895 100644 --- a/crates/copilot/src/copilot_chat.rs +++ b/crates/copilot/src/copilot_chat.rs @@ -7,8 +7,7 @@ use chrono::DateTime; use fs::Fs; use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt}; use gpui::{AppContext, AsyncAppContext, Global}; -use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; -use isahc::config::Configurable; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest}; use paths::home_dir; use serde::{Deserialize, Serialize}; use settings::watch_config_file; @@ -275,7 +274,7 @@ async fn request_api_token( .header("Accept", "application/json"); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); } let request = request_builder.body(AsyncBody::empty())?; @@ -332,7 +331,7 @@ async fn stream_completion( .header("Copilot-Integration-Id", "vscode-chat"); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); } let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; let mut response = client.send(request).await?; diff --git a/crates/copilot/src/copilot_completion_provider.rs b/crates/copilot/src/copilot_completion_provider.rs index 41ba59a0d51d8f..3a3361cda1996d 100644 --- a/crates/copilot/src/copilot_completion_provider.rs +++ b/crates/copilot/src/copilot_completion_provider.rs @@ -1,14 +1,14 @@ use crate::{Completion, Copilot}; use anyhow::Result; use client::telemetry::Telemetry; -use editor::{Direction, 
InlineCompletionProvider}; +use editor::{CompletionProposal, Direction, InlayProposal, InlineCompletionProvider}; use gpui::{AppContext, EntityId, Model, ModelContext, Task}; use language::{ language_settings::{all_language_settings, AllLanguageSettings}, Buffer, OffsetRangeExt, ToOffset, }; use settings::Settings; -use std::{ops::Range, path::Path, sync::Arc, time::Duration}; +use std::{path::Path, sync::Arc, time::Duration}; pub const COPILOT_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(75); @@ -237,7 +237,7 @@ impl InlineCompletionProvider for CopilotCompletionProvider { buffer: &Model, cursor_position: language::Anchor, cx: &'a AppContext, - ) -> Option<(&'a str, Option>)> { + ) -> Option { let buffer_id = buffer.entity_id(); let buffer = buffer.read(cx); let completion = self.active_completion()?; @@ -267,7 +267,14 @@ impl InlineCompletionProvider for CopilotCompletionProvider { if completion_text.trim().is_empty() { None } else { - Some((completion_text, None)) + Some(CompletionProposal { + inlays: vec![InlayProposal::Suggestion( + cursor_position.bias_right(buffer), + completion_text.into(), + )], + text: completion_text.into(), + delete_range: None, + }) } } else { None @@ -760,7 +767,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local("a = 1\nb = 2\n", cx)); let buffer_2 = cx.new_model(|cx| Buffer::local("c = 3\nd = 4\n", cx)); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ExcerptRange { @@ -1011,7 +1018,7 @@ mod tests { .unwrap(); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite); multibuffer.push_excerpts( private_buffer.clone(), [ExcerptRange { diff --git a/crates/copilot/src/sign_in.rs b/crates/copilot/src/sign_in.rs index 1d14e5c1aadc55..da6b969b7222bb 100644 --- a/crates/copilot/src/sign_in.rs +++ b/crates/copilot/src/sign_in.rs @@ -1,10 +1,10 @@ use crate::{request::PromptUserDeviceFlow, Copilot, Status}; use gpui::{ - div, svg, AppContext, ClipboardItem, DismissEvent, Element, EventEmitter, FocusHandle, + div, AppContext, ClipboardItem, DismissEvent, Element, EventEmitter, FocusHandle, FocusableView, InteractiveElement, IntoElement, Model, MouseDownEvent, ParentElement, Render, Styled, Subscription, ViewContext, }; -use ui::{prelude::*, Button, IconName, Label}; +use ui::{prelude::*, Button, Label, Vector, VectorName}; use workspace::ModalView; const COPILOT_SIGN_UP_URL: &str = "https://github.com/features/copilot"; @@ -198,12 +198,8 @@ impl Render for CopilotCodeVerification { cx.focus(&this.focus_handle); })) .child( - svg() - .w_32() - .h_16() - .flex_none() - .path(IconName::ZedXCopilot.path()) - .text_color(cx.theme().colors().icon), + Vector::new(VectorName::ZedXCopilot, rems(8.), rems(4.)) + .color(Color::Custom(cx.theme().colors().icon)), ) .child(prompt) } diff --git a/crates/db/src/db.rs b/crates/db/src/db.rs index 768f382203020d..98fca60d6312c9 100644 --- a/crates/db/src/db.rs +++ b/crates/db/src/db.rs @@ -11,16 +11,14 @@ pub use smol; pub use sqlez; pub use sqlez_macros; -use release_channel::ReleaseChannel; pub use release_channel::RELEASE_CHANNEL; use sqlez::domain::Migrator; use sqlez::thread_safe_connection::ThreadSafeConnection; use sqlez_macros::sql; -use std::env; use std::future::Future; use std::path::Path; 
-use std::sync::atomic::{AtomicBool, Ordering}; -use std::sync::LazyLock; +use std::sync::{atomic::Ordering, LazyLock}; +use std::{env, sync::atomic::AtomicBool}; use util::{maybe, ResultExt}; const CONNECTION_INITIALIZE_QUERY: &str = sql!( @@ -47,16 +45,12 @@ pub static ALL_FILE_DB_FAILED: LazyLock = LazyLock::new(|| AtomicBoo /// This will retry a couple times if there are failures. If opening fails once, the db directory /// is moved to a backup folder and a new one is created. If that fails, a shared in memory db is created. /// In either case, static variables are set so that the user can be notified. -pub async fn open_db( - db_dir: &Path, - release_channel: &ReleaseChannel, -) -> ThreadSafeConnection { +pub async fn open_db(db_dir: &Path, scope: &str) -> ThreadSafeConnection { if *ZED_STATELESS { return open_fallback_db().await; } - let release_channel_name = release_channel.dev_name(); - let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name))); + let main_db_dir = db_dir.join(format!("0-{}", scope)); let connection = maybe!(async { smol::fs::create_dir_all(&main_db_dir) @@ -118,7 +112,7 @@ pub async fn open_test_db(db_name: &str) -> ThreadSafeConnection /// Implements a basic DB wrapper for a given domain #[macro_export] macro_rules! define_connection { - (pub static ref $id:ident: $t:ident<()> = $migrations:expr;) => { + (pub static ref $id:ident: $t:ident<()> = $migrations:expr; $($global:ident)?) => { pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<$t>); impl ::std::ops::Deref for $t { @@ -139,18 +133,23 @@ macro_rules! define_connection { } } - use std::sync::LazyLock; #[cfg(any(test, feature = "test-support"))] - pub static $id: LazyLock<$t> = LazyLock::new(|| { + pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| { $t($crate::smol::block_on($crate::open_test_db(stringify!($id)))) }); #[cfg(not(any(test, feature = "test-support")))] - pub static $id: LazyLock<$t> = LazyLock::new(|| { - $t($crate::smol::block_on($crate::open_db($crate::database_dir(), &$crate::RELEASE_CHANNEL))) + pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| { + let db_dir = $crate::database_dir(); + let scope = if false $(|| stringify!($global) == "global")? { + "global" + } else { + $crate::RELEASE_CHANNEL.dev_name() + }; + $t($crate::smol::block_on($crate::open_db(db_dir, scope))) }); }; - (pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr;) => { + (pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr; $($global:ident)?) => { pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<( $($d),+, $t )>); impl ::std::ops::Deref for $t { @@ -178,12 +177,18 @@ macro_rules! define_connection { #[cfg(not(any(test, feature = "test-support")))] pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| { - $t($crate::smol::block_on($crate::open_db($crate::database_dir(), &$crate::RELEASE_CHANNEL))) + let db_dir = $crate::database_dir(); + let scope = if false $(|| stringify!($global) == "global")? 
{ + "global" + } else { + $crate::RELEASE_CHANNEL.dev_name() + }; + $t($crate::smol::block_on($crate::open_db(db_dir, scope))) }); }; } -pub fn write_and_log(cx: &mut AppContext, db_write: impl FnOnce() -> F + Send + 'static) +pub fn write_and_log(cx: &AppContext, db_write: impl FnOnce() -> F + Send + 'static) where F: Future> + Send, { @@ -225,7 +230,11 @@ mod tests { .prefix("DbTests") .tempdir() .unwrap(); - let _bad_db = open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + let _bad_db = open_db::( + tempdir.path(), + &release_channel::ReleaseChannel::Dev.dev_name(), + ) + .await; } /// Test that DB exists but corrupted (causing recreate) @@ -262,13 +271,19 @@ mod tests { .tempdir() .unwrap(); { - let corrupt_db = - open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + let corrupt_db = open_db::( + tempdir.path(), + &release_channel::ReleaseChannel::Dev.dev_name(), + ) + .await; assert!(corrupt_db.persistent()); } - let good_db = - open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + let good_db = open_db::( + tempdir.path(), + &release_channel::ReleaseChannel::Dev.dev_name(), + ) + .await; assert!( good_db.select_row::("SELECT * FROM test2").unwrap()() .unwrap() @@ -311,8 +326,11 @@ mod tests { .unwrap(); { // Setup the bad database - let corrupt_db = - open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + let corrupt_db = open_db::( + tempdir.path(), + &release_channel::ReleaseChannel::Dev.dev_name(), + ) + .await; assert!(corrupt_db.persistent()); } @@ -323,7 +341,7 @@ mod tests { let guard = thread::spawn(move || { let good_db = smol::block_on(open_db::( tmp_path.as_path(), - &release_channel::ReleaseChannel::Dev, + &release_channel::ReleaseChannel::Dev.dev_name(), )); assert!( good_db.select_row::("SELECT * FROM test2").unwrap()() diff --git a/crates/db/src/kvp.rs b/crates/db/src/kvp.rs index 0b0cdd9aa11177..c9d994d34da7d1 100644 --- a/crates/db/src/kvp.rs +++ b/crates/db/src/kvp.rs @@ -60,3 +60,33 @@ mod tests { assert_eq!(db.read_kvp("key-1").unwrap(), None); } } + +define_connection!(pub static ref GLOBAL_KEY_VALUE_STORE: GlobalKeyValueStore<()> = + &[sql!( + CREATE TABLE IF NOT EXISTS kv_store( + key TEXT PRIMARY KEY, + value TEXT NOT NULL + ) STRICT; + )]; + global +); + +impl GlobalKeyValueStore { + query! { + pub fn read_kvp(key: &str) -> Result> { + SELECT value FROM kv_store WHERE key = (?) + } + } + + query! { + pub async fn write_kvp(key: String, value: String) -> Result<()> { + INSERT OR REPLACE INTO kv_store(key, value) VALUES ((?), (?)) + } + } + + query! { + pub async fn delete_kvp(key: String) -> Result<()> { + DELETE FROM kv_store WHERE key = (?) 
+ } + } +} diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index ced97be2dc87cd..687638854209ba 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -156,12 +156,7 @@ impl ProjectDiagnosticsEditor { cx.on_focus_out(&focus_handle, |this, _event, cx| this.focus_out(cx)) .detach(); - let excerpts = cx.new_model(|cx| { - MultiBuffer::new( - project_handle.read(cx).replica_id(), - project_handle.read(cx).capability(), - ) - }); + let excerpts = cx.new_model(|cx| MultiBuffer::new(project_handle.read(cx).capability())); let editor = cx.new_view(|cx| { let mut editor = Editor::for_multibuffer(excerpts.clone(), Some(project_handle.clone()), false, cx); @@ -645,37 +640,42 @@ impl Item for ProjectDiagnosticsEditor { } fn tab_content(&self, params: TabContentParams, _: &WindowContext) -> AnyElement { - if self.summary.error_count == 0 && self.summary.warning_count == 0 { - Label::new("No problems") - .color(params.text_color()) - .into_any_element() - } else { - h_flex() - .gap_1() - .when(self.summary.error_count > 0, |then| { - then.child( - h_flex() - .gap_1() - .child(Icon::new(IconName::XCircle).color(Color::Error)) - .child( - Label::new(self.summary.error_count.to_string()) - .color(params.text_color()), - ), - ) - }) - .when(self.summary.warning_count > 0, |then| { + h_flex() + .gap_1() + .when( + self.summary.error_count == 0 && self.summary.warning_count == 0, + |then| { then.child( h_flex() .gap_1() - .child(Icon::new(IconName::ExclamationTriangle).color(Color::Warning)) - .child( - Label::new(self.summary.warning_count.to_string()) - .color(params.text_color()), - ), + .child(Icon::new(IconName::Check).color(Color::Success)) + .child(Label::new("No problems").color(params.text_color())), ) - }) - .into_any_element() - } + }, + ) + .when(self.summary.error_count > 0, |then| { + then.child( + h_flex() + .gap_1() + .child(Icon::new(IconName::XCircle).color(Color::Error)) + .child( + Label::new(self.summary.error_count.to_string()) + .color(params.text_color()), + ), + ) + }) + .when(self.summary.warning_count > 0, |then| { + then.child( + h_flex() + .gap_1() + .child(Icon::new(IconName::Warning).color(Color::Warning)) + .child( + Label::new(self.summary.warning_count.to_string()) + .color(params.text_color()), + ), + ) + }) + .into_any_element() } fn telemetry_event_text(&self) -> Option<&'static str> { @@ -804,7 +804,7 @@ fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock { icon.path(IconName::XCircle.path()) .text_color(Color::Error.color(cx)) } else { - icon.path(IconName::ExclamationTriangle.path()) + icon.path(IconName::Warning.path()) .text_color(Color::Warning.color(cx)) } }), diff --git a/crates/diagnostics/src/diagnostics_tests.rs b/crates/diagnostics/src/diagnostics_tests.rs index 75bfd6415cc7b3..1daffffb4eabcf 100644 --- a/crates/diagnostics/src/diagnostics_tests.rs +++ b/crates/diagnostics/src/diagnostics_tests.rs @@ -962,7 +962,6 @@ fn random_diagnostic( const FILE_HEADER: &str = "file header"; const EXCERPT_HEADER: &str = "excerpt header"; -const EXCERPT_FOOTER: &str = "excerpt footer"; fn editor_blocks( editor: &View, @@ -998,7 +997,7 @@ fn editor_blocks( .ok()? } - Block::ExcerptHeader { + Block::ExcerptBoundary { starts_new_buffer, .. } => { if *starts_new_buffer { @@ -1007,7 +1006,6 @@ fn editor_blocks( EXCERPT_HEADER.into() } } - Block::ExcerptFooter { .. 
} => EXCERPT_FOOTER.into(), }; Some((row, name)) diff --git a/crates/diagnostics/src/items.rs b/crates/diagnostics/src/items.rs index 80b31b999c653b..72a4ac9bcfb01e 100644 --- a/crates/diagnostics/src/items.rs +++ b/crates/diagnostics/src/items.rs @@ -30,7 +30,7 @@ impl Render for DiagnosticIndicator { (0, warning_count) => h_flex() .gap_1() .child( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .color(Color::Warning), ) @@ -52,7 +52,7 @@ impl Render for DiagnosticIndicator { ) .child(Label::new(error_count.to_string()).size(LabelSize::Small)) .child( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .color(Color::Warning), ) diff --git a/crates/diagnostics/src/toolbar_controls.rs b/crates/diagnostics/src/toolbar_controls.rs index 64eb2683047171..0d3000814262ad 100644 --- a/crates/diagnostics/src/toolbar_controls.rs +++ b/crates/diagnostics/src/toolbar_controls.rs @@ -1,7 +1,7 @@ use crate::ProjectDiagnosticsEditor; use gpui::{EventEmitter, ParentElement, Render, View, ViewContext, WeakView}; use ui::prelude::*; -use ui::{IconButton, IconName, Tooltip}; +use ui::{IconButton, IconButtonShape, IconName, Tooltip}; use workspace::{item::ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView}; pub struct ToolbarControls { @@ -33,11 +33,19 @@ impl Render for ToolbarControls { "Include Warnings" }; + let warning_color = if include_warnings { + Color::Warning + } else { + Color::Muted + }; + h_flex() + .gap_1() .when(has_stale_excerpts, |div| { div.child( IconButton::new("update-excerpts", IconName::Update) .icon_color(Color::Info) + .shape(IconButtonShape::Square) .disabled(is_updating) .tooltip(move |cx| Tooltip::text("Update excerpts", cx)) .on_click(cx.listener(|this, _, cx| { @@ -50,7 +58,9 @@ impl Render for ToolbarControls { ) }) .child( - IconButton::new("toggle-warnings", IconName::ExclamationTriangle) + IconButton::new("toggle-warnings", IconName::Warning) + .icon_color(warning_color) + .shape(IconButtonShape::Square) .tooltip(move |cx| Tooltip::text(tooltip, cx)) .on_click(cx.listener(|this, _, cx| { if let Some(editor) = this.editor() { diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index 324201b41ebaa0..cfd9284f807650 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -24,7 +24,8 @@ test-support = [ "workspace/test-support", "tree-sitter-rust", "tree-sitter-typescript", - "tree-sitter-html" + "tree-sitter-html", + "unindent", ] [dependencies] @@ -35,7 +36,7 @@ chrono.workspace = true client.workspace = true clock.workspace = true collections.workspace = true -convert_case = "0.6.0" +convert_case.workspace = true db.workspace = true emojis.workspace = true file_icons.workspace = true @@ -54,6 +55,7 @@ markdown.workspace = true multi_buffer.workspace = true ordered-float.workspace = true parking_lot.workspace = true +pretty_assertions.workspace = true project.workspace = true rand.workspace = true rpc.workspace = true @@ -61,6 +63,7 @@ schemars.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true +similar.workspace = true smallvec.workspace = true smol.workspace = true snippet.workspace = true @@ -73,6 +76,7 @@ theme.workspace = true tree-sitter-html = { workspace = true, optional = true } tree-sitter-rust = { workspace = true, optional = true } tree-sitter-typescript = { workspace = true, optional = true } +unindent = { workspace = true, optional = true } ui.workspace = true url.workspace = true 
util.workspace = true diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index 319286d252d793..4955f00c382334 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -156,14 +156,14 @@ pub struct DeleteToPreviousWordStart { impl_actions!( editor, [ + ComposeCompletion, ConfirmCodeAction, ConfirmCompletion, - ComposeCompletion, DeleteToNextWordEnd, DeleteToPreviousWordStart, ExpandExcerpts, - ExpandExcerptsUp, ExpandExcerptsDown, + ExpandExcerptsUp, FoldAt, HandleInput, MoveDownByLines, @@ -188,11 +188,12 @@ impl_actions!( gpui::actions!( editor, [ - AcceptPartialCopilotSuggestion, AcceptInlineCompletion, + AcceptPartialCopilotSuggestion, AcceptPartialInlineCompletion, AddSelectionAbove, AddSelectionBelow, + ApplyDiffHunk, Backspace, Cancel, CancelLanguageServerWork, @@ -210,10 +211,10 @@ gpui::actions!( ConvertToUpperCamelCase, ConvertToUpperCase, Copy, + CopyFileLocation, CopyHighlightJson, CopyPath, CopyPermalinkToLine, - CopyFileLocation, CopyRelativePath, Cut, CutToEndOfLine, @@ -230,12 +231,17 @@ gpui::actions!( ExpandMacroRecursively, FindAllReferences, Fold, + FoldAll, + FoldRecursive, FoldSelectedRanges, + ToggleFold, + ToggleFoldRecursive, Format, - GoToDefinition, - GoToDefinitionSplit, + FormatSelections, GoToDeclaration, GoToDeclarationSplit, + GoToDefinition, + GoToDefinitionSplit, GoToDiagnostic, GoToHunk, GoToImplementation, @@ -273,9 +279,10 @@ gpui::actions!( NextScreen, OpenExcerpts, OpenExcerptsSplit, + OpenProposedChangesEditor, + OpenFile, OpenPermalinkToLine, OpenUrl, - OpenFile, Outdent, PageDown, PageUp, @@ -288,18 +295,22 @@ gpui::actions!( RevealInFileManager, ReverseLines, RevertFile, + ReloadFile, RevertSelectedHunks, + Rewrap, ScrollCursorBottom, ScrollCursorCenter, - ScrollCursorTop, ScrollCursorCenterTopBottom, + ScrollCursorTop, SelectAll, SelectAllMatches, SelectDown, - SelectLargerSyntaxNode, SelectEnclosingSymbol, + SelectLargerSyntaxNode, SelectLeft, SelectLine, + SelectPageDown, + SelectPageUp, SelectRight, SelectSmallerSyntaxNode, SelectToBeginning, @@ -311,8 +322,6 @@ gpui::actions!( SelectToPreviousWordStart, SelectToStartOfParagraph, SelectUp, - SelectPageDown, - SelectPageUp, ShowCharacterPalette, ShowInlineCompletion, ShowSignatureHelp, @@ -326,19 +335,21 @@ gpui::actions!( ToggleAutoSignatureHelp, ToggleGitBlame, ToggleGitBlameInline, - ToggleSelectionMenu, ToggleHunkDiff, + ToggleIndentGuides, ToggleInlayHints, ToggleInlineCompletions, ToggleLineNumbers, ToggleRelativeLineNumbers, - ToggleIndentGuides, + ToggleSelectionMenu, ToggleSoftWrap, ToggleTabBar, Transpose, Undo, UndoSelection, + UnfoldAll, UnfoldLines, + UnfoldRecursive, UniqueLinesCaseInsensitive, UniqueLinesCaseSensitive, ] diff --git a/crates/editor/src/clangd_ext.rs b/crates/editor/src/clangd_ext.rs index 7fbb8f5f41b11d..501f81b1073df2 100644 --- a/crates/editor/src/clangd_ext.rs +++ b/crates/editor/src/clangd_ext.rs @@ -9,10 +9,10 @@ use crate::lsp_ext::find_specific_language_server_in_selection; use crate::{element::register_action, Editor, SwitchSourceHeader}; -static CLANGD_SERVER_NAME: &str = "clangd"; +const CLANGD_SERVER_NAME: &str = "clangd"; fn is_c_language(language: &Language) -> bool { - return language.name().as_ref() == "C++" || language.name().as_ref() == "C"; + return language.name() == "C++".into() || language.name() == "C".into(); } pub fn switch_source_header( diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 6cb871555118e4..790a0a6a1eba78 100644 --- 
a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -127,7 +127,9 @@ impl DisplayMap { let buffer_subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); let tab_size = Self::tab_size(&buffer, cx); - let (inlay_map, snapshot) = InlayMap::new(buffer.read(cx).snapshot(cx)); + let buffer_snapshot = buffer.read(cx).snapshot(cx); + let crease_map = CreaseMap::new(&buffer_snapshot); + let (inlay_map, snapshot) = InlayMap::new(buffer_snapshot); let (fold_map, snapshot) = FoldMap::new(snapshot); let (tab_map, snapshot) = TabMap::new(snapshot, tab_size); let (wrap_map, snapshot) = WrapMap::new(snapshot, font, font_size, wrap_width, cx); @@ -138,7 +140,6 @@ impl DisplayMap { excerpt_header_height, excerpt_footer_height, ); - let crease_map = CreaseMap::default(); cx.observe(&wrap_map, |_, _, cx| cx.notify()).detach(); @@ -1645,7 +1646,7 @@ pub mod tests { }, ..Default::default() }, - Some(tree_sitter_rust::language()), + Some(tree_sitter_rust::LANGUAGE.into()), ) .with_highlights_query( r#" @@ -1750,7 +1751,7 @@ pub mod tests { }, ..Default::default() }, - Some(tree_sitter_rust::language()), + Some(tree_sitter_rust::LANGUAGE.into()), ) .with_highlights_query( r#" @@ -1833,7 +1834,7 @@ pub mod tests { }, ..Default::default() }, - Some(tree_sitter_rust::language()), + Some(tree_sitter_rust::LANGUAGE.into()), ) .with_highlights_query( r#" diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 28e0b9d7af3709..f4ee57408b1ca8 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -5,8 +5,8 @@ use super::{ use crate::{EditorStyle, GutterDimensions}; use collections::{Bound, HashMap, HashSet}; use gpui::{AnyElement, EntityId, Pixels, WindowContext}; -use language::{BufferSnapshot, Chunk, Patch, Point}; -use multi_buffer::{Anchor, ExcerptId, ExcerptRange, MultiBufferRow, ToPoint as _}; +use language::{Chunk, Patch, Point}; +use multi_buffer::{Anchor, ExcerptId, ExcerptInfo, MultiBufferRow, ToPoint as _}; use parking_lot::Mutex; use std::{ cell::RefCell, @@ -128,26 +128,17 @@ pub struct BlockContext<'a, 'b> { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum BlockId { Custom(CustomBlockId), - ExcerptHeader(ExcerptId), - ExcerptFooter(ExcerptId), -} - -impl From<BlockId> for EntityId { - fn from(value: BlockId) -> Self { - match value { - BlockId::Custom(CustomBlockId(id)) => EntityId::from(id as u64), - BlockId::ExcerptHeader(id) => id.into(), - BlockId::ExcerptFooter(id) => id.into(), - } - } + ExcerptBoundary(Option<ExcerptId>), } impl From<BlockId> for ElementId { fn from(value: BlockId) -> Self { match value { BlockId::Custom(CustomBlockId(id)) => ("Block", id).into(), - BlockId::ExcerptHeader(id) => ("ExcerptHeader", EntityId::from(id)).into(), - BlockId::ExcerptFooter(id) => ("ExcerptFooter", EntityId::from(id)).into(), + BlockId::ExcerptBoundary(next_excerpt) => match next_excerpt { + Some(id) => ("ExcerptBoundary", EntityId::from(id)).into(), + None => "LastExcerptBoundary".into(), + }, } } } @@ -156,8 +147,7 @@ impl std::fmt::Display for BlockId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Custom(id) => write!(f, "Block({id:?})"), - Self::ExcerptHeader(id) => write!(f, "ExcerptHeader({id:?})"), - Self::ExcerptFooter(id) => write!(f, "ExcerptFooter({id:?})"), + Self::ExcerptBoundary(id) => write!(f, "ExcerptHeader({id:?})"), } } } @@ -177,8 +167,7 @@ struct Transform { pub(crate) enum BlockType { Custom(CustomBlockId), - Header, - Footer, 
+ ExcerptBoundary, } pub(crate) trait BlockLike { @@ -191,27 +180,20 @@ #[derive(Clone)] pub enum Block { Custom(Arc<CustomBlock>), - ExcerptHeader { - id: ExcerptId, - buffer: BufferSnapshot, - range: ExcerptRange<text::Anchor>, + ExcerptBoundary { + prev_excerpt: Option<ExcerptInfo>, + next_excerpt: Option<ExcerptInfo>, height: u32, starts_new_buffer: bool, show_excerpt_controls: bool, }, - ExcerptFooter { - id: ExcerptId, - disposition: BlockDisposition, - height: u32, - }, } impl BlockLike for Block { fn block_type(&self) -> BlockType { match self { Block::Custom(block) => BlockType::Custom(block.id), - Block::ExcerptHeader { .. } => BlockType::Header, - Block::ExcerptFooter { .. } => BlockType::Footer, + Block::ExcerptBoundary { .. } => BlockType::ExcerptBoundary, } } @@ -222,8 +204,7 @@ impl BlockLike for Block { fn priority(&self) -> usize { match self { Block::Custom(block) => block.priority, - Block::ExcerptHeader { .. } => usize::MAX, - Block::ExcerptFooter { .. } => 0, + Block::ExcerptBoundary { .. } => usize::MAX, } } } @@ -232,32 +213,36 @@ impl Block { pub fn id(&self) -> BlockId { match self { Block::Custom(block) => BlockId::Custom(block.id), - Block::ExcerptHeader { id, .. } => BlockId::ExcerptHeader(*id), - Block::ExcerptFooter { id, .. } => BlockId::ExcerptFooter(*id), + Block::ExcerptBoundary { next_excerpt, .. } => { + BlockId::ExcerptBoundary(next_excerpt.as_ref().map(|info| info.id)) + } } } fn disposition(&self) -> BlockDisposition { match self { Block::Custom(block) => block.disposition, - Block::ExcerptHeader { .. } => BlockDisposition::Above, - Block::ExcerptFooter { disposition, .. } => *disposition, + Block::ExcerptBoundary { next_excerpt, .. } => { + if next_excerpt.is_some() { + BlockDisposition::Above + } else { + BlockDisposition::Below + } + } } } pub fn height(&self) -> u32 { match self { Block::Custom(block) => block.height, - Block::ExcerptHeader { height, .. } => *height, - Block::ExcerptFooter { height, .. } => *height, + Block::ExcerptBoundary { height, .. } => *height, } } pub fn style(&self) -> BlockStyle { match self { Block::Custom(block) => block.style, - Block::ExcerptHeader { .. } => BlockStyle::Sticky, - Block::ExcerptFooter { .. } => BlockStyle::Sticky, + Block::ExcerptBoundary { .. } => BlockStyle::Sticky, } } } @@ -266,24 +251,17 @@ impl Debug for Block { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Custom(block) => f.debug_struct("Custom").field("block", block).finish(), - Self::ExcerptHeader { - buffer, + Self::ExcerptBoundary { starts_new_buffer, - id, + next_excerpt, + prev_excerpt, .. } => f - .debug_struct("ExcerptHeader") - .field("id", &id) - .field("path", &buffer.file().map(|f| f.path())) + .debug_struct("ExcerptBoundary") + .field("prev_excerpt", &prev_excerpt) + .field("next_excerpt", &next_excerpt) .field("starts_new_buffer", &starts_new_buffer) .finish(), - Block::ExcerptFooter { - id, disposition, .. 
- } => f - .debug_struct("ExcerptFooter") - .field("id", &id) - .field("disposition", &disposition) - .finish(), } } } @@ -389,10 +367,10 @@ impl BlockMap { } let mut transforms = self.transforms.borrow_mut(); - let mut new_transforms = SumTree::new(); + let mut new_transforms = SumTree::default(); let old_row_count = transforms.summary().input_rows; let new_row_count = wrap_snapshot.max_point().row() + 1; - let mut cursor = transforms.cursor::(); + let mut cursor = transforms.cursor::(&()); let mut last_block_ix = 0; let mut blocks_in_edit = Vec::new(); let mut edits = edits.into_iter().peekable(); @@ -595,66 +573,62 @@ impl BlockMap { { buffer .excerpt_boundaries_in_range(range) - .flat_map(move |excerpt_boundary| { - let mut wrap_row = wrap_snapshot - .make_wrap_point(Point::new(excerpt_boundary.row.0, 0), Bias::Left) - .row(); - - [ - show_excerpt_controls - .then(|| { - let disposition; - if excerpt_boundary.next.is_some() { - disposition = BlockDisposition::Above; - } else { - wrap_row = wrap_snapshot - .make_wrap_point( - Point::new( - excerpt_boundary.row.0, - buffer.line_len(excerpt_boundary.row), - ), - Bias::Left, - ) - .row(); - disposition = BlockDisposition::Below; - } - - excerpt_boundary.prev.as_ref().map(|prev| { - ( - wrap_row, - Block::ExcerptFooter { - id: prev.id, - height: excerpt_footer_height, - disposition, - }, - ) - }) - }) - .flatten(), - excerpt_boundary.next.map(|next| { - let starts_new_buffer = excerpt_boundary - .prev - .map_or(true, |prev| prev.buffer_id != next.buffer_id); - - ( - wrap_row, - Block::ExcerptHeader { - id: next.id, - buffer: next.buffer, - range: next.range, - height: if starts_new_buffer { - buffer_header_height - } else { - excerpt_header_height - }, - starts_new_buffer, - show_excerpt_controls, - }, + .filter_map(move |excerpt_boundary| { + let wrap_row; + if excerpt_boundary.next.is_some() { + wrap_row = wrap_snapshot + .make_wrap_point(Point::new(excerpt_boundary.row.0, 0), Bias::Left) + .row(); + } else { + wrap_row = wrap_snapshot + .make_wrap_point( + Point::new( + excerpt_boundary.row.0, + buffer.line_len(excerpt_boundary.row), + ), + Bias::Left, ) - }), - ] + .row(); + } + + let starts_new_buffer = match (&excerpt_boundary.prev, &excerpt_boundary.next) { + (_, None) => false, + (None, Some(_)) => true, + (Some(prev), Some(next)) => prev.buffer_id != next.buffer_id, + }; + + let mut height = 0; + if excerpt_boundary.prev.is_some() { + if show_excerpt_controls { + height += excerpt_footer_height; + } + } + if excerpt_boundary.next.is_some() { + if starts_new_buffer { + height += buffer_header_height; + if show_excerpt_controls { + height += excerpt_header_height; + } + } else { + height += excerpt_header_height; + } + } + + if height == 0 { + return None; + } + + Some(( + wrap_row, + Block::ExcerptBoundary { + prev_excerpt: excerpt_boundary.prev, + next_excerpt: excerpt_boundary.next, + height, + starts_new_buffer, + show_excerpt_controls, + }, + )) }) - .flatten() } pub(crate) fn sort_blocks(blocks: &mut [(u32, B)]) { @@ -665,12 +639,9 @@ impl BlockMap { .disposition() .cmp(&block_b.disposition()) .then_with(|| match ((block_a.block_type()), (block_b.block_type())) { - (BlockType::Footer, BlockType::Footer) => Ordering::Equal, - (BlockType::Footer, _) => Ordering::Less, - (_, BlockType::Footer) => Ordering::Greater, - (BlockType::Header, BlockType::Header) => Ordering::Equal, - (BlockType::Header, _) => Ordering::Less, - (_, BlockType::Header) => Ordering::Greater, + (BlockType::ExcerptBoundary, BlockType::ExcerptBoundary) => 
Ordering::Equal, + (BlockType::ExcerptBoundary, _) => Ordering::Less, + (_, BlockType::ExcerptBoundary) => Ordering::Greater, (BlockType::Custom(a_id), BlockType::Custom(b_id)) => block_b .priority() .cmp(&block_a.priority()) @@ -757,7 +728,7 @@ impl<'a> BlockMapReader<'a> { .unwrap_or(self.wrap_snapshot.max_point().row() + 1), ); - let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(); + let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); cursor.seek(&start_wrap_row, Bias::Left, &()); while let Some(transform) = cursor.item() { if cursor.start().0 > end_wrap_row { @@ -950,7 +921,7 @@ impl BlockSnapshot { highlights: Highlights<'a>, ) -> BlockChunks<'a> { let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows); - let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); let input_end = { cursor.seek(&BlockRow(rows.end), Bias::Right, &()); let overshoot = if cursor @@ -990,7 +961,7 @@ impl BlockSnapshot { } pub(super) fn buffer_rows(&self, start_row: BlockRow) -> BlockBufferRows { - let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&start_row, Bias::Right, &()); let (output_start, input_start) = cursor.start(); let overshoot = if cursor.item().map_or(false, |t| t.is_isomorphic()) { @@ -1008,7 +979,7 @@ impl BlockSnapshot { } pub fn blocks_in_range(&self, rows: Range) -> impl Iterator { - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(&()); cursor.seek(&BlockRow(rows.start), Bias::Left, &()); while cursor.start().0 < rows.start && cursor.end(&()).0 <= rows.start { cursor.next(&()); @@ -1045,34 +1016,20 @@ impl BlockSnapshot { let custom_block = self.custom_blocks_by_id.get(&custom_block_id)?; Some(Block::Custom(custom_block.clone())) } - BlockId::ExcerptHeader(excerpt_id) => { - let excerpt_range = buffer.range_for_excerpt::(excerpt_id)?; - let wrap_point = self - .wrap_snapshot - .make_wrap_point(excerpt_range.start, Bias::Left); - let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(); - cursor.seek(&WrapRow(wrap_point.row()), Bias::Left, &()); - while let Some(transform) = cursor.item() { - if let Some(block) = transform.block.as_ref() { - if block.id() == block_id { - return Some(block.clone()); - } - } else if cursor.start().0 > WrapRow(wrap_point.row()) { - break; - } - - cursor.next(&()); + BlockId::ExcerptBoundary(next_excerpt_id) => { + let wrap_point; + if let Some(next_excerpt_id) = next_excerpt_id { + let excerpt_range = buffer.range_for_excerpt::(next_excerpt_id)?; + wrap_point = self + .wrap_snapshot + .make_wrap_point(excerpt_range.start, Bias::Left); + } else { + wrap_point = self + .wrap_snapshot + .make_wrap_point(buffer.max_point(), Bias::Left); } - None - } - BlockId::ExcerptFooter(excerpt_id) => { - let excerpt_range = buffer.range_for_excerpt::(excerpt_id)?; - let wrap_point = self - .wrap_snapshot - .make_wrap_point(excerpt_range.end, Bias::Left); - - let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(); + let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); cursor.seek(&WrapRow(wrap_point.row()), Bias::Left, &()); while let Some(transform) = cursor.item() { if let Some(block) = transform.block.as_ref() { @@ -1102,7 +1059,7 @@ impl BlockSnapshot { } pub(super) fn line_len(&self, row: BlockRow) -> u32 { - let mut cursor = self.transforms.cursor::<(BlockRow, 
WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&BlockRow(row.0), Bias::Right, &()); if let Some(transform) = cursor.item() { let (output_start, input_start) = cursor.start(); @@ -1118,13 +1075,13 @@ impl BlockSnapshot { } pub(super) fn is_block_line(&self, row: BlockRow) -> bool { - let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&row, Bias::Right, &()); cursor.item().map_or(false, |t| t.block.is_some()) } pub fn clip_point(&self, point: BlockPoint, bias: Bias) -> BlockPoint { - let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&BlockRow(point.row), Bias::Right, &()); let max_input_row = WrapRow(self.transforms.summary().input_rows); @@ -1172,7 +1129,7 @@ impl BlockSnapshot { } pub fn to_block_point(&self, wrap_point: WrapPoint) -> BlockPoint { - let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(); + let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); cursor.seek(&WrapRow(wrap_point.row()), Bias::Right, &()); if let Some(transform) = cursor.item() { debug_assert!(transform.is_isomorphic()); @@ -1188,7 +1145,7 @@ impl BlockSnapshot { } pub fn to_wrap_point(&self, block_point: BlockPoint) -> WrapPoint { - let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&BlockRow(block_point.row), Bias::Right, &()); if let Some(transform) = cursor.item() { match transform.block.as_ref().map(|b| b.disposition()) { @@ -1360,7 +1317,7 @@ impl<'a> Iterator for BlockBufferRows<'a> { impl sum_tree::Item for Transform { type Summary = TransformSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.summary.clone() } } @@ -1368,6 +1325,10 @@ impl sum_tree::Item for Transform { impl sum_tree::Summary for TransformSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { self.input_rows += summary.input_rows; self.output_rows += summary.output_rows; @@ -1375,12 +1336,20 @@ impl sum_tree::Summary for TransformSummary { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for WrapRow { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += summary.input_rows; } } impl<'a> sum_tree::Dimension<'a, TransformSummary> for BlockRow { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += summary.output_rows; } @@ -1456,7 +1425,7 @@ mod tests { }; use gpui::{div, font, px, AppContext, Context as _, Element}; use language::{Buffer, Capability}; - use multi_buffer::MultiBuffer; + use multi_buffer::{ExcerptRange, MultiBuffer}; use rand::prelude::*; use settings::SettingsStore; use std::env; @@ -1659,7 +1628,7 @@ mod tests { let mut excerpt_ids = Vec::new(); let multi_buffer = cx.new_model(|cx| { - let mut multi_buffer = MultiBuffer::new(0, Capability::ReadWrite); + let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite); excerpt_ids.extend(multi_buffer.push_excerpts( buffer1.clone(), [ExcerptRange { @@ -1712,22 +1681,20 @@ mod tests { // Each excerpt has a header above and footer below. Excerpts are also *separated* by a newline. 
assert_eq!( snapshot.text(), - "\nBuff\ner 1\n\n\nBuff\ner 2\n\n\nBuff\ner 3\n" + "\n\nBuff\ner 1\n\n\n\nBuff\ner 2\n\n\n\nBuff\ner 3\n" ); let blocks: Vec<_> = snapshot .blocks_in_range(0..u32::MAX) - .map(|(row, block)| (row, block.id())) + .map(|(row, block)| (row..row + block.height(), block.id())) .collect(); assert_eq!( blocks, vec![ - (0, BlockId::ExcerptHeader(excerpt_ids[0])), - (3, BlockId::ExcerptFooter(excerpt_ids[0])), - (4, BlockId::ExcerptHeader(excerpt_ids[1])), - (7, BlockId::ExcerptFooter(excerpt_ids[1])), - (8, BlockId::ExcerptHeader(excerpt_ids[2])), - (11, BlockId::ExcerptFooter(excerpt_ids[2])) + (0..2, BlockId::ExcerptBoundary(Some(excerpt_ids[0]))), // path, header + (4..7, BlockId::ExcerptBoundary(Some(excerpt_ids[1]))), // footer, path, header + (9..12, BlockId::ExcerptBoundary(Some(excerpt_ids[2]))), // footer, path, header + (14..15, BlockId::ExcerptBoundary(None)), // footer ] ); } @@ -2271,13 +2238,10 @@ mod tests { #[derive(Debug, Eq, PartialEq)] enum ExpectedBlock { - ExcerptHeader { + ExcerptBoundary { height: u32, starts_new_buffer: bool, - }, - ExcerptFooter { - height: u32, - disposition: BlockDisposition, + is_last: bool, }, Custom { disposition: BlockDisposition, @@ -2291,8 +2255,7 @@ mod tests { fn block_type(&self) -> BlockType { match self { ExpectedBlock::Custom { id, .. } => BlockType::Custom(*id), - ExpectedBlock::ExcerptHeader { .. } => BlockType::Header, - ExpectedBlock::ExcerptFooter { .. } => BlockType::Footer, + ExpectedBlock::ExcerptBoundary { .. } => BlockType::ExcerptBoundary, } } @@ -2303,8 +2266,7 @@ mod tests { fn priority(&self) -> usize { match self { ExpectedBlock::Custom { priority, .. } => *priority, - ExpectedBlock::ExcerptHeader { .. } => usize::MAX, - ExpectedBlock::ExcerptFooter { .. } => 0, + ExpectedBlock::ExcerptBoundary { .. } => usize::MAX, } } } @@ -2312,17 +2274,21 @@ mod tests { impl ExpectedBlock { fn height(&self) -> u32 { match self { - ExpectedBlock::ExcerptHeader { height, .. } => *height, + ExpectedBlock::ExcerptBoundary { height, .. } => *height, ExpectedBlock::Custom { height, .. } => *height, - ExpectedBlock::ExcerptFooter { height, .. } => *height, } } fn disposition(&self) -> BlockDisposition { match self { - ExpectedBlock::ExcerptHeader { .. } => BlockDisposition::Above, + ExpectedBlock::ExcerptBoundary { is_last, .. } => { + if *is_last { + BlockDisposition::Below + } else { + BlockDisposition::Above + } + } ExpectedBlock::Custom { disposition, .. } => *disposition, - ExpectedBlock::ExcerptFooter { disposition, .. } => *disposition, } } } @@ -2336,21 +2302,15 @@ mod tests { height: block.height, priority: block.priority, }, - Block::ExcerptHeader { + Block::ExcerptBoundary { height, starts_new_buffer, + next_excerpt, .. - } => ExpectedBlock::ExcerptHeader { + } => ExpectedBlock::ExcerptBoundary { height, starts_new_buffer, - }, - Block::ExcerptFooter { - height, - disposition, - .. - } => ExpectedBlock::ExcerptFooter { - height, - disposition, + is_last: next_excerpt.is_none(), }, } } @@ -2368,8 +2328,7 @@ mod tests { fn as_custom(&self) -> Option<&CustomBlock> { match self { Block::Custom(block) => Some(block), - Block::ExcerptHeader { .. } => None, - Block::ExcerptFooter { .. } => None, + Block::ExcerptBoundary { .. 
} => None, } } } diff --git a/crates/editor/src/display_map/crease_map.rs b/crates/editor/src/display_map/crease_map.rs index 9aa2728dca8c45..531c650c43a6ed 100644 --- a/crates/editor/src/display_map/crease_map.rs +++ b/crates/editor/src/display_map/crease_map.rs @@ -1,29 +1,45 @@ use collections::HashMap; use gpui::{AnyElement, IntoElement}; use multi_buffer::{Anchor, AnchorRangeExt, MultiBufferRow, MultiBufferSnapshot, ToPoint}; +use serde::{Deserialize, Serialize}; use std::{cmp::Ordering, ops::Range, sync::Arc}; use sum_tree::{Bias, SeekTarget, SumTree}; use text::Point; -use ui::WindowContext; +use ui::{IconName, SharedString, WindowContext}; use crate::FoldPlaceholder; #[derive(Copy, Clone, Default, Debug, Eq, PartialEq, PartialOrd, Ord, Hash)] pub struct CreaseId(usize); -#[derive(Default)] pub struct CreaseMap { snapshot: CreaseSnapshot, next_id: CreaseId, id_to_range: HashMap>, } -#[derive(Clone, Default)] +impl CreaseMap { + pub fn new(snapshot: &MultiBufferSnapshot) -> Self { + CreaseMap { + snapshot: CreaseSnapshot::new(snapshot), + next_id: CreaseId::default(), + id_to_range: HashMap::default(), + } + } +} + +#[derive(Clone)] pub struct CreaseSnapshot { creases: SumTree, } impl CreaseSnapshot { + pub fn new(snapshot: &MultiBufferSnapshot) -> Self { + CreaseSnapshot { + creases: SumTree::new(snapshot), + } + } + /// Returns the first Crease starting on the specified buffer row. pub fn query_row<'a>( &'a self, @@ -31,7 +47,7 @@ impl CreaseSnapshot { snapshot: &'a MultiBufferSnapshot, ) -> Option<&'a Crease> { let start = snapshot.anchor_before(Point::new(row.0, 0)); - let mut cursor = self.creases.cursor::(); + let mut cursor = self.creases.cursor::(snapshot); cursor.seek(&start, Bias::Left, snapshot); while let Some(item) = cursor.item() { match Ord::cmp(&item.crease.range.start.to_point(snapshot).row, &row.0) { @@ -49,11 +65,36 @@ impl CreaseSnapshot { None } + pub fn creases_in_range<'a>( + &'a self, + range: Range, + snapshot: &'a MultiBufferSnapshot, + ) -> impl 'a + Iterator { + let start = snapshot.anchor_before(Point::new(range.start.0, 0)); + let mut cursor = self.creases.cursor::(snapshot); + cursor.seek(&start, Bias::Left, snapshot); + + std::iter::from_fn(move || { + while let Some(item) = cursor.item() { + cursor.next(snapshot); + let crease_start = item.crease.range.start.to_point(snapshot); + let crease_end = item.crease.range.end.to_point(snapshot); + if crease_end.row > range.end.0 { + continue; + } + if crease_start.row >= range.start.0 && crease_end.row < range.end.0 { + return Some(&item.crease); + } + } + None + }) + } + pub fn crease_items_with_offsets( &self, snapshot: &MultiBufferSnapshot, ) -> Vec<(CreaseId, Range)> { - let mut cursor = self.creases.cursor::(); + let mut cursor = self.creases.cursor::(snapshot); let mut results = Vec::new(); cursor.next(snapshot); @@ -87,6 +128,14 @@ pub struct Crease { pub placeholder: FoldPlaceholder, pub render_toggle: RenderToggleFn, pub render_trailer: RenderTrailerFn, + pub metadata: Option, +} + +/// Metadata about a [`Crease`], that is used for serialization. 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct CreaseMetadata { + pub icon: IconName, + pub label: SharedString, } impl Crease { @@ -124,8 +173,14 @@ impl Crease { render_trailer: Arc::new(move |row, folded, cx| { render_trailer(row, folded, cx).into_any_element() }), + metadata: None, } } + + pub fn with_metadata(mut self, metadata: CreaseMetadata) -> Self { + self.metadata = Some(metadata); + self + } } impl std::fmt::Debug for Crease { @@ -154,8 +209,8 @@ impl CreaseMap { ) -> Vec { let mut new_ids = Vec::new(); self.snapshot.creases = { - let mut new_creases = SumTree::new(); - let mut cursor = self.snapshot.creases.cursor::(); + let mut new_creases = SumTree::new(snapshot); + let mut cursor = self.snapshot.creases.cursor::(snapshot); for crease in creases { new_creases.append(cursor.slice(&crease.range, Bias::Left, snapshot), snapshot); @@ -187,8 +242,8 @@ impl CreaseMap { }); self.snapshot.creases = { - let mut new_creases = SumTree::new(); - let mut cursor = self.snapshot.creases.cursor::(); + let mut new_creases = SumTree::new(snapshot); + let mut cursor = self.snapshot.creases.cursor::(snapshot); for (id, range) in removals { new_creases.append(cursor.slice(&range, Bias::Left, snapshot), snapshot); @@ -224,6 +279,10 @@ impl Default for ItemSummary { impl sum_tree::Summary for ItemSummary { type Context = MultiBufferSnapshot; + fn zero(_cx: &Self::Context) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _snapshot: &MultiBufferSnapshot) { self.range = other.range.clone(); } @@ -232,7 +291,7 @@ impl sum_tree::Summary for ItemSummary { impl sum_tree::Item for CreaseItem { type Summary = ItemSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &MultiBufferSnapshot) -> Self::Summary { ItemSummary { range: self.crease.range.clone(), } @@ -263,7 +322,7 @@ mod test { let text = "line1\nline2\nline3\nline4\nline5"; let buffer = MultiBuffer::build_simple(text, cx); let snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx)); - let mut crease_map = CreaseMap::default(); + let mut crease_map = CreaseMap::new(&buffer.read(cx).read(cx)); // Insert creases let creases = [ @@ -304,4 +363,54 @@ mod test { .query_row(MultiBufferRow(3), &snapshot) .is_none()); } + + #[gpui::test] + fn test_creases_in_range(cx: &mut AppContext) { + let text = "line1\nline2\nline3\nline4\nline5\nline6\nline7"; + let buffer = MultiBuffer::build_simple(text, cx); + let snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx)); + let mut crease_map = CreaseMap::new(&snapshot); + + let creases = [ + Crease::new( + snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(1, 5)), + FoldPlaceholder::test(), + |_row, _folded, _toggle, _cx| div(), + |_row, _folded, _cx| div(), + ), + Crease::new( + snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_after(Point::new(3, 5)), + FoldPlaceholder::test(), + |_row, _folded, _toggle, _cx| div(), + |_row, _folded, _cx| div(), + ), + Crease::new( + snapshot.anchor_before(Point::new(5, 0))..snapshot.anchor_after(Point::new(5, 5)), + FoldPlaceholder::test(), + |_row, _folded, _toggle, _cx| div(), + |_row, _folded, _cx| div(), + ), + ]; + crease_map.insert(creases, &snapshot); + + let crease_snapshot = crease_map.snapshot(); + + let range = MultiBufferRow(0)..MultiBufferRow(7); + let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect(); + assert_eq!(creases.len(), 3); + + let range = MultiBufferRow(2)..MultiBufferRow(5); + let creases: Vec<_> = 
crease_snapshot.creases_in_range(range, &snapshot).collect(); + assert_eq!(creases.len(), 1); + assert_eq!(creases[0].range.start.to_point(&snapshot).row, 3); + + let range = MultiBufferRow(0)..MultiBufferRow(2); + let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect(); + assert_eq!(creases.len(), 1); + assert_eq!(creases[0].range.start.to_point(&snapshot).row, 1); + + let range = MultiBufferRow(6)..MultiBufferRow(7); + let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect(); + assert_eq!(creases.len(), 0); + } } diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 486fe4b2e5d40b..5eb26ff9693883 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -79,7 +79,7 @@ impl FoldPoint { } pub fn to_inlay_point(self, snapshot: &FoldSnapshot) -> InlayPoint { - let mut cursor = snapshot.transforms.cursor::<(FoldPoint, InlayPoint)>(); + let mut cursor = snapshot.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); cursor.seek(&self, Bias::Right, &()); let overshoot = self.0 - cursor.start().0 .0; InlayPoint(cursor.start().1 .0 + overshoot) @@ -88,7 +88,7 @@ impl FoldPoint { pub fn to_offset(self, snapshot: &FoldSnapshot) -> FoldOffset { let mut cursor = snapshot .transforms - .cursor::<(FoldPoint, TransformSummary)>(); + .cursor::<(FoldPoint, TransformSummary)>(&()); cursor.seek(&self, Bias::Right, &()); let overshoot = self.0 - cursor.start().1.output.lines; let mut offset = cursor.start().1.output.len; @@ -105,6 +105,10 @@ impl FoldPoint { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldPoint { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.output.lines; } @@ -154,8 +158,8 @@ impl<'a> FoldMapWriter<'a> { folds.sort_unstable_by(|a, b| sum_tree::SeekTarget::cmp(&a.range, &b.range, buffer)); self.0.snapshot.folds = { - let mut new_tree = SumTree::new(); - let mut cursor = self.0.snapshot.folds.cursor::(); + let mut new_tree = SumTree::new(buffer); + let mut cursor = self.0.snapshot.folds.cursor::(buffer); for fold in folds { new_tree.append(cursor.slice(&fold.range, Bias::Right, buffer), buffer); new_tree.push(fold, buffer); @@ -202,8 +206,8 @@ impl<'a> FoldMapWriter<'a> { fold_ixs_to_delete.dedup(); self.0.snapshot.folds = { - let mut cursor = self.0.snapshot.folds.cursor::(); - let mut folds = SumTree::new(); + let mut cursor = self.0.snapshot.folds.cursor::(buffer); + let mut folds = SumTree::new(buffer); for fold_ix in fold_ixs_to_delete { folds.append(cursor.slice(&fold_ix, Bias::Right, buffer), buffer); cursor.next(buffer); @@ -230,7 +234,7 @@ impl FoldMap { pub(crate) fn new(inlay_snapshot: InlaySnapshot) -> (Self, FoldSnapshot) { let this = Self { snapshot: FoldSnapshot { - folds: Default::default(), + folds: SumTree::new(&inlay_snapshot.buffer), transforms: SumTree::from_item( Transform { summary: TransformSummary { @@ -314,8 +318,8 @@ impl FoldMap { } else { let mut inlay_edits_iter = inlay_edits.iter().cloned().peekable(); - let mut new_transforms = SumTree::::new(); - let mut cursor = self.snapshot.transforms.cursor::(); + let mut new_transforms = SumTree::::default(); + let mut cursor = self.snapshot.transforms.cursor::(&()); cursor.seek(&InlayOffset(0), Bias::Right, &()); while let Some(mut edit) = inlay_edits_iter.next() { @@ -367,7 +371,10 @@ impl FoldMap { let anchor = inlay_snapshot .buffer 
.anchor_before(inlay_snapshot.to_buffer_offset(edit.new.start)); - let mut folds_cursor = self.snapshot.folds.cursor::(); + let mut folds_cursor = self + .snapshot + .folds + .cursor::(&inlay_snapshot.buffer); folds_cursor.seek( &FoldRange(anchor..Anchor::max()), Bias::Left, @@ -470,8 +477,8 @@ impl FoldMap { let mut old_transforms = self .snapshot .transforms - .cursor::<(InlayOffset, FoldOffset)>(); - let mut new_transforms = new_transforms.cursor::<(InlayOffset, FoldOffset)>(); + .cursor::<(InlayOffset, FoldOffset)>(&()); + let mut new_transforms = new_transforms.cursor::<(InlayOffset, FoldOffset)>(&()); for mut edit in inlay_edits { old_transforms.seek(&edit.old.start, Bias::Left, &()); @@ -545,7 +552,7 @@ impl FoldSnapshot { pub fn text_summary_for_range(&self, range: Range) -> TextSummary { let mut summary = TextSummary::default(); - let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(); + let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); cursor.seek(&range.start, Bias::Right, &()); if let Some(transform) = cursor.item() { let start_in_transform = range.start.0 - cursor.start().0 .0; @@ -594,7 +601,7 @@ impl FoldSnapshot { } pub fn to_fold_point(&self, point: InlayPoint, bias: Bias) -> FoldPoint { - let mut cursor = self.transforms.cursor::<(InlayPoint, FoldPoint)>(); + let mut cursor = self.transforms.cursor::<(InlayPoint, FoldPoint)>(&()); cursor.seek(&point, Bias::Right, &()); if cursor.item().map_or(false, |t| t.is_fold()) { if bias == Bias::Left || point == cursor.start().0 { @@ -631,7 +638,7 @@ impl FoldSnapshot { } let fold_point = FoldPoint::new(start_row, 0); - let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(); + let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); cursor.seek(&fold_point, Bias::Left, &()); let overshoot = fold_point.0 - cursor.start().0 .0; @@ -672,7 +679,7 @@ impl FoldSnapshot { { let buffer_offset = offset.to_offset(&self.inlay_snapshot.buffer); let inlay_offset = self.inlay_snapshot.to_inlay_offset(buffer_offset); - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(&()); cursor.seek(&inlay_offset, Bias::Right, &()); cursor.item().map_or(false, |t| t.placeholder.is_some()) } @@ -681,7 +688,7 @@ impl FoldSnapshot { let mut inlay_point = self .inlay_snapshot .to_inlay_point(Point::new(buffer_row.0, 0)); - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(&()); cursor.seek(&inlay_point, Bias::Right, &()); loop { match cursor.item() { @@ -711,7 +718,7 @@ impl FoldSnapshot { language_aware: bool, highlights: Highlights<'a>, ) -> FoldChunks<'a> { - let mut transform_cursor = self.transforms.cursor::<(FoldOffset, InlayOffset)>(); + let mut transform_cursor = self.transforms.cursor::<(FoldOffset, InlayOffset)>(&()); transform_cursor.seek(&range.start, Bias::Right, &()); let inlay_start = { @@ -766,7 +773,7 @@ impl FoldSnapshot { } pub fn clip_point(&self, point: FoldPoint, bias: Bias) -> FoldPoint { - let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(); + let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); cursor.seek(&point, Bias::Right, &()); if let Some(transform) = cursor.item() { let transform_start = cursor.start().0 .0; @@ -826,7 +833,7 @@ where let buffer = &inlay_snapshot.buffer; let start = buffer.anchor_before(range.start.to_offset(buffer)); let end = buffer.anchor_after(range.end.to_offset(buffer)); - let mut cursor = folds.filter::<_, usize>(move |summary| { + 
let mut cursor = folds.filter::<_, usize>(buffer, move |summary| { let start_cmp = start.cmp(&summary.max_end, buffer); let end_cmp = end.cmp(&summary.min_start, buffer); @@ -937,7 +944,7 @@ struct TransformSummary { impl sum_tree::Item for Transform { type Summary = TransformSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.summary.clone() } } @@ -945,6 +952,10 @@ impl sum_tree::Item for Transform { impl sum_tree::Summary for TransformSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &()) { self.input += &other.input; self.output += &other.output; @@ -993,7 +1004,7 @@ impl Default for FoldRange { impl sum_tree::Item for Fold { type Summary = FoldSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &MultiBufferSnapshot) -> Self::Summary { FoldSummary { start: self.range.start, end: self.range.end, @@ -1028,6 +1039,10 @@ impl Default for FoldSummary { impl sum_tree::Summary for FoldSummary { type Context = MultiBufferSnapshot; + fn zero(_cx: &MultiBufferSnapshot) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, buffer: &Self::Context) { if other.min_start.cmp(&self.min_start, buffer) == Ordering::Less { self.min_start = other.min_start; @@ -1052,6 +1067,10 @@ impl sum_tree::Summary for FoldSummary { } impl<'a> sum_tree::Dimension<'a, FoldSummary> for FoldRange { + fn zero(_cx: &MultiBufferSnapshot) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a FoldSummary, _: &MultiBufferSnapshot) { self.0.start = summary.start; self.0.end = summary.end; @@ -1065,6 +1084,10 @@ impl<'a> sum_tree::SeekTarget<'a, FoldSummary, FoldRange> for FoldRange { } impl<'a> sum_tree::Dimension<'a, FoldSummary> for usize { + fn zero(_cx: &MultiBufferSnapshot) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a FoldSummary, _: &MultiBufferSnapshot) { *self += summary.count; } @@ -1196,7 +1219,7 @@ impl FoldOffset { pub fn to_point(self, snapshot: &FoldSnapshot) -> FoldPoint { let mut cursor = snapshot .transforms - .cursor::<(FoldOffset, TransformSummary)>(); + .cursor::<(FoldOffset, TransformSummary)>(&()); cursor.seek(&self, Bias::Right, &()); let overshoot = if cursor.item().map_or(true, |t| t.is_fold()) { Point::new(0, (self.0 - cursor.start().0 .0) as u32) @@ -1210,7 +1233,7 @@ impl FoldOffset { #[cfg(test)] pub fn to_inlay_offset(self, snapshot: &FoldSnapshot) -> InlayOffset { - let mut cursor = snapshot.transforms.cursor::<(FoldOffset, InlayOffset)>(); + let mut cursor = snapshot.transforms.cursor::<(FoldOffset, InlayOffset)>(&()); cursor.seek(&self, Bias::Right, &()); let overshoot = self.0 - cursor.start().0 .0; InlayOffset(cursor.start().1 .0 + overshoot) @@ -1240,18 +1263,30 @@ impl Sub for FoldOffset { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldOffset { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.output.len; } } impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayPoint { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.input.lines; } } impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayOffset { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.input.len; } diff --git 
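The display-map hunks in this diff all apply the same `sum_tree` migration: `Item::summary`, `Summary::zero`, `Dimension::zero`, and the `cursor`/`filter` constructors now receive an explicit context argument (`&()` for the transform trees, `&MultiBufferSnapshot` for fold summaries), and empty trees are built with `SumTree::default()`. The snippet below is a minimal, self-contained sketch of that shape; the trait definitions are simplified stand-ins for illustration only, not the real `sum_tree` crate.

```rust
// Simplified stand-ins for the `sum_tree` traits, showing the new
// context-aware shape: construction and summation both take `cx`.
trait Summary: Clone {
    type Context;
    fn zero(cx: &Self::Context) -> Self;
    fn add_summary(&mut self, other: &Self, cx: &Self::Context);
}

trait Item {
    type Summary: Summary;
    // `summary` now receives the same context used when combining summaries.
    fn summary(&self, cx: &<Self::Summary as Summary>::Context) -> Self::Summary;
}

// A toy summary with a unit context, like the transform trees in these hunks.
#[derive(Clone, Default)]
struct TextSummary {
    len: usize,
}

impl Summary for TextSummary {
    type Context = ();

    fn zero(_cx: &()) -> Self {
        Default::default()
    }

    fn add_summary(&mut self, other: &Self, _cx: &()) {
        self.len += other.len;
    }
}

struct Chunk(String);

impl Item for Chunk {
    type Summary = TextSummary;

    fn summary(&self, _cx: &()) -> TextSummary {
        TextSummary { len: self.0.len() }
    }
}

fn main() {
    // Summaries are seeded with `zero(cx)` instead of `Default::default()`,
    // mirroring the `Dimension::zero(_cx)` impls added throughout the diff.
    let chunks = [Chunk("fold".into()), Chunk("map".into())];
    let mut total = TextSummary::zero(&());
    for chunk in &chunks {
        total.add_summary(&chunk.summary(&()), &());
    }
    assert_eq!(total.len, 7);
}
```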
a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index b6ab2cdd28feea..d4e39f2df9270e 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -74,7 +74,7 @@ impl Inlay { impl sum_tree::Item for Transform { type Summary = TransformSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { match self { Transform::Isomorphic(summary) => TransformSummary { input: summary.clone(), @@ -97,6 +97,10 @@ struct TransformSummary { impl sum_tree::Summary for TransformSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &()) { self.input += &other.input; self.output += &other.output; @@ -137,6 +141,10 @@ impl SubAssign for InlayOffset { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayOffset { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.output.len; } @@ -162,18 +170,30 @@ impl Sub for InlayPoint { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayPoint { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.output.lines; } } impl<'a> sum_tree::Dimension<'a, TransformSummary> for usize { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { *self += &summary.input.len; } } impl<'a> sum_tree::Dimension<'a, TransformSummary> for Point { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { *self += &summary.input.lines; } @@ -475,8 +495,8 @@ impl InlayMap { (snapshot.clone(), Vec::new()) } else { let mut inlay_edits = Patch::default(); - let mut new_transforms = SumTree::new(); - let mut cursor = snapshot.transforms.cursor::<(usize, InlayOffset)>(); + let mut new_transforms = SumTree::default(); + let mut cursor = snapshot.transforms.cursor::<(usize, InlayOffset)>(&()); let mut buffer_edits_iter = buffer_edits.iter().peekable(); while let Some(buffer_edit) = buffer_edits_iter.next() { new_transforms.append(cursor.slice(&buffer_edit.old.start, Bias::Left, &()), &()); @@ -693,7 +713,7 @@ impl InlaySnapshot { pub fn to_point(&self, offset: InlayOffset) -> InlayPoint { let mut cursor = self .transforms - .cursor::<(InlayOffset, (InlayPoint, usize))>(); + .cursor::<(InlayOffset, (InlayPoint, usize))>(&()); cursor.seek(&offset, Bias::Right, &()); let overshoot = offset.0 - cursor.start().0 .0; match cursor.item() { @@ -723,7 +743,7 @@ impl InlaySnapshot { pub fn to_offset(&self, point: InlayPoint) -> InlayOffset { let mut cursor = self .transforms - .cursor::<(InlayPoint, (InlayOffset, Point))>(); + .cursor::<(InlayPoint, (InlayOffset, Point))>(&()); cursor.seek(&point, Bias::Right, &()); let overshoot = point.0 - cursor.start().0 .0; match cursor.item() { @@ -741,9 +761,8 @@ impl InlaySnapshot { None => self.len(), } } - pub fn to_buffer_point(&self, point: InlayPoint) -> Point { - let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(); + let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&()); cursor.seek(&point, Bias::Right, &()); match cursor.item() { Some(Transform::Isomorphic(_)) => { @@ -754,9 +773,8 @@ impl InlaySnapshot { None => self.buffer.max_point(), } } - pub fn to_buffer_offset(&self, offset: InlayOffset) -> usize { - let mut 
cursor = self.transforms.cursor::<(InlayOffset, usize)>(); + let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&()); cursor.seek(&offset, Bias::Right, &()); match cursor.item() { Some(Transform::Isomorphic(_)) => { @@ -769,7 +787,7 @@ impl InlaySnapshot { } pub fn to_inlay_offset(&self, offset: usize) -> InlayOffset { - let mut cursor = self.transforms.cursor::<(usize, InlayOffset)>(); + let mut cursor = self.transforms.cursor::<(usize, InlayOffset)>(&()); cursor.seek(&offset, Bias::Left, &()); loop { match cursor.item() { @@ -801,9 +819,8 @@ impl InlaySnapshot { } } } - pub fn to_inlay_point(&self, point: Point) -> InlayPoint { - let mut cursor = self.transforms.cursor::<(Point, InlayPoint)>(); + let mut cursor = self.transforms.cursor::<(Point, InlayPoint)>(&()); cursor.seek(&point, Bias::Left, &()); loop { match cursor.item() { @@ -837,7 +854,7 @@ impl InlaySnapshot { } pub fn clip_point(&self, mut point: InlayPoint, mut bias: Bias) -> InlayPoint { - let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(); + let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&()); cursor.seek(&point, Bias::Left, &()); loop { match cursor.item() { @@ -934,7 +951,7 @@ impl InlaySnapshot { pub fn text_summary_for_range(&self, range: Range) -> TextSummary { let mut summary = TextSummary::default(); - let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(); + let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&()); cursor.seek(&range.start, Bias::Right, &()); let overshoot = range.start.0 - cursor.start().0 .0; @@ -982,7 +999,7 @@ impl InlaySnapshot { } pub fn buffer_rows(&self, row: u32) -> InlayBufferRows<'_> { - let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(); + let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&()); let inlay_point = InlayPoint::new(row, 0); cursor.seek(&inlay_point, Bias::Left, &()); @@ -1024,7 +1041,7 @@ impl InlaySnapshot { language_aware: bool, highlights: Highlights<'a>, ) -> InlayChunks<'a> { - let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(); + let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&()); cursor.seek(&range.start, Bias::Right, &()); let mut highlight_endpoints = Vec::new(); diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 08b2ae0c645bcd..dc4d93058cdf7f 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -204,7 +204,7 @@ impl WrapMap { } } else { let old_rows = self.snapshot.transforms.summary().output.lines.row + 1; - self.snapshot.transforms = SumTree::new(); + self.snapshot.transforms = SumTree::default(); let summary = self.snapshot.tab_snapshot.text_summary(); if !summary.lines.is_zero() { self.snapshot @@ -303,7 +303,7 @@ impl WrapMap { impl WrapSnapshot { fn new(tab_snapshot: TabSnapshot) -> Self { - let mut transforms = SumTree::new(); + let mut transforms = SumTree::default(); let extent = tab_snapshot.text_summary(); if !extent.lines.is_zero() { transforms.push(Transform::isomorphic(extent), &()); @@ -324,7 +324,7 @@ impl WrapSnapshot { if tab_edits.is_empty() { new_transforms = self.transforms.clone(); } else { - let mut old_cursor = self.transforms.cursor::(); + let mut old_cursor = self.transforms.cursor::(&()); let mut tab_edits_iter = tab_edits.iter().peekable(); new_transforms = @@ -424,7 +424,7 @@ impl WrapSnapshot { new_transforms = self.transforms.clone(); } else { let mut row_edits = row_edits.into_iter().peekable(); - let mut 
old_cursor = self.transforms.cursor::(); + let mut old_cursor = self.transforms.cursor::(&()); new_transforms = old_cursor.slice( &TabPoint::new(row_edits.peek().unwrap().old_rows.start, 0), @@ -537,8 +537,8 @@ impl WrapSnapshot { fn compute_edits(&self, tab_edits: &[TabEdit], new_snapshot: &WrapSnapshot) -> Patch { let mut wrap_edits = Vec::new(); - let mut old_cursor = self.transforms.cursor::(); - let mut new_cursor = new_snapshot.transforms.cursor::(); + let mut old_cursor = self.transforms.cursor::(&()); + let mut new_cursor = new_snapshot.transforms.cursor::(&()); for mut tab_edit in tab_edits.iter().cloned() { tab_edit.old.start.0.column = 0; tab_edit.old.end.0 += Point::new(1, 0); @@ -579,7 +579,7 @@ impl WrapSnapshot { ) -> WrapChunks<'a> { let output_start = WrapPoint::new(rows.start, 0); let output_end = WrapPoint::new(rows.end, 0); - let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); transforms.seek(&output_start, Bias::Right, &()); let mut input_start = TabPoint(transforms.start().1 .0); if transforms.item().map_or(false, |t| t.is_isomorphic()) { @@ -606,7 +606,7 @@ impl WrapSnapshot { } pub fn line_len(&self, row: u32) -> u32 { - let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left, &()); if cursor .item() @@ -626,7 +626,7 @@ impl WrapSnapshot { } pub fn soft_wrap_indent(&self, row: u32) -> Option { - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(&()); cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right, &()); cursor.item().and_then(|transform| { if transform.is_isomorphic() { @@ -642,7 +642,7 @@ impl WrapSnapshot { } pub fn buffer_rows(&self, start_row: u32) -> WrapBufferRows { - let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &()); let mut input_row = transforms.start().1.row(); if transforms.item().map_or(false, |t| t.is_isomorphic()) { @@ -662,7 +662,7 @@ impl WrapSnapshot { } pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint { - let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&point, Bias::Right, &()); let mut tab_point = cursor.start().1 .0; if cursor.item().map_or(false, |t| t.is_isomorphic()) { @@ -680,14 +680,14 @@ impl WrapSnapshot { } pub fn tab_point_to_wrap_point(&self, point: TabPoint) -> WrapPoint { - let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(); + let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(&()); cursor.seek(&point, Bias::Right, &()); WrapPoint(cursor.start().1 .0 + (point.0 - cursor.start().0 .0)) } pub fn clip_point(&self, mut point: WrapPoint, bias: Bias) -> WrapPoint { if bias == Bias::Left { - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(&()); cursor.seek(&point, Bias::Right, &()); if cursor.item().map_or(false, |t| !t.is_isomorphic()) { point = *cursor.start(); @@ -705,7 +705,7 @@ impl WrapSnapshot { *point.column_mut() = 0; - let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&point, Bias::Right, &()); if 
cursor.item().is_none() { cursor.prev(&()); @@ -725,7 +725,7 @@ impl WrapSnapshot { pub fn next_row_boundary(&self, mut point: WrapPoint) -> Option { point.0 += Point::new(1, 0); - let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&point, Bias::Right, &()); while let Some(transform) = cursor.item() { if transform.is_isomorphic() && cursor.start().1.column() == 0 { @@ -747,7 +747,7 @@ impl WrapSnapshot { ); { - let mut transforms = self.transforms.cursor::<()>().peekable(); + let mut transforms = self.transforms.cursor::<()>(&()).peekable(); while let Some(transform) = transforms.next() { if let Some(next_transform) = transforms.peek() { assert!(transform.is_isomorphic() != next_transform.is_isomorphic()); @@ -917,7 +917,7 @@ impl Transform { impl sum_tree::Item for Transform { type Summary = TransformSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.summary.clone() } } @@ -982,6 +982,10 @@ impl WrapPoint { impl sum_tree::Summary for TransformSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &()) { self.input += &other.input; self.output += &other.output; @@ -989,6 +993,10 @@ impl sum_tree::Summary for TransformSummary { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for TabPoint { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += summary.input.lines; } @@ -1001,6 +1009,10 @@ impl<'a> sum_tree::SeekTarget<'a, TransformSummary, TransformSummary> for TabPoi } impl<'a> sum_tree::Dimension<'a, TransformSummary> for WrapPoint { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += summary.output.lines; } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index cb4ae63afcd9a6..ba3841b4e2202e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -35,6 +35,7 @@ mod lsp_ext; mod mouse_context_menu; pub mod movement; mod persistence; +mod proposed_changes_editor; mod rust_analyzer_ext; pub mod scroll; mod selections_collection; @@ -46,8 +47,7 @@ mod signature_help; #[cfg(any(test, feature = "test-support"))] pub mod test; -use ::git::diff::{DiffHunk, DiffHunkStatus}; -use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegistry}; +use ::git::diff::DiffHunkStatus; pub(crate) use actions::*; use aho_corasick::AhoCorasick; use anyhow::{anyhow, Context as _, Result}; @@ -60,31 +60,30 @@ use debounced_delay::DebouncedDelay; use display_map::*; pub use display_map::{DisplayPoint, FoldPlaceholder}; pub use editor_settings::{ - CurrentLineHighlight, EditorSettings, ScrollBeyondLastLine, SearchSettings, + CurrentLineHighlight, EditorSettings, ScrollBeyondLastLine, SearchSettings, ShowScrollbar, }; pub use editor_settings_controls::*; use element::LineWithInvisibles; pub use element::{ CursorLayout, EditorElement, HighlightedRange, HighlightedRangeLine, PointForPosition, }; -use futures::FutureExt; +use futures::{future, FutureExt}; use fuzzy::{StringMatch, StringMatchCandidate}; use git::blame::GitBlame; -use git::diff_hunk_to_display; use gpui::{ div, impl_actions, point, prelude::*, px, relative, size, uniform_list, Action, AnyElement, AppContext, AsyncWindowContext, AvailableSpace, BackgroundExecutor, Bounds, ClipboardEntry, - ClipboardItem, 
Context, DispatchPhase, ElementId, EntityId, EventEmitter, FocusHandle, - FocusOutEvent, FocusableView, FontId, FontWeight, HighlightStyle, Hsla, InteractiveText, - KeyContext, ListSizingBehavior, Model, MouseButton, PaintQuad, ParentElement, Pixels, Render, - SharedString, Size, StrikethroughStyle, Styled, StyledText, Subscription, Task, TextStyle, - UTF16Selection, UnderlineStyle, UniformListScrollHandle, View, ViewContext, ViewInputHandler, - VisualContext, WeakFocusHandle, WeakView, WindowContext, + ClipboardItem, Context, DispatchPhase, ElementId, EventEmitter, FocusHandle, FocusOutEvent, + FocusableView, FontId, FontWeight, HighlightStyle, Hsla, InteractiveText, KeyContext, + ListSizingBehavior, Model, MouseButton, PaintQuad, ParentElement, Pixels, Render, SharedString, + Size, StrikethroughStyle, Styled, StyledText, Subscription, Task, TextStyle, UTF16Selection, + UnderlineStyle, UniformListScrollHandle, View, ViewContext, ViewInputHandler, VisualContext, + WeakFocusHandle, WeakView, WindowContext, }; use highlight_matching_bracket::refresh_matching_bracket_highlights; use hover_popover::{hide_hover, HoverState}; -use hunk_diff::ExpandedHunks; pub(crate) use hunk_diff::HoveredHunk; +use hunk_diff::{diff_hunk_to_display, ExpandedHunks}; use indent_guides::ActiveIndentGuidesState; use inlay_hint_cache::{InlayHintCache, InlaySplice, InvalidationStrategy}; pub use inline_completion_provider::*; @@ -96,8 +95,14 @@ use language::{ CursorShape, Diagnostic, Documentation, IndentKind, IndentSize, Language, OffsetRangeExt, Point, Selection, SelectionGoal, TransactionId, }; -use language::{point_to_lsp, BufferRow, CharClassifier, Runnable, RunnableRange}; +use language::{ + point_to_lsp, BufferRow, CharClassifier, LanguageServerName, Runnable, RunnableRange, +}; use linked_editing_ranges::refresh_linked_ranges; +pub use proposed_changes_editor::{ + ProposedChangeLocation, ProposedChangesEditor, ProposedChangesEditorToolbar, +}; +use similar::{ChangeTag, TextDiff}; use task::{ResolvedTask, TaskTemplate, TaskVariables}; use hover_links::{find_file, HoverLink, HoveredLinkState, InlayHighlight}; @@ -112,13 +117,16 @@ pub use multi_buffer::{ Anchor, AnchorRangeExt, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint, }; -use multi_buffer::{ExpandExcerptDirection, MultiBufferPoint, MultiBufferRow, ToOffsetUtf16}; +use multi_buffer::{ + ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow, ToOffsetUtf16, +}; use ordered_float::OrderedFloat; use parking_lot::{Mutex, RwLock}; -use project::project_settings::{GitGutterSetting, ProjectSettings}; use project::{ - CodeAction, Completion, CompletionIntent, FormatTrigger, Item, Location, Project, ProjectPath, - ProjectTransaction, TaskSourceKind, + lsp_store::{FormatTarget, FormatTrigger}, + project_settings::{GitGutterSetting, ProjectSettings}, + CodeAction, Completion, CompletionIntent, DocumentHighlight, InlayHint, Item, Location, + LocationLink, Project, ProjectPath, ProjectTransaction, TaskSourceKind, }; use rand::prelude::*; use rpc::{proto::*, ErrorExt}; @@ -150,20 +158,20 @@ use theme::{ }; use ui::{ h_flex, prelude::*, ButtonSize, ButtonStyle, Disclosure, IconButton, IconName, IconSize, - ListItem, Popover, Tooltip, + ListItem, Popover, PopoverMenuHandle, Tooltip, }; use util::{defer, maybe, post_inc, RangeExt, ResultExt, TryFutureExt}; use workspace::item::{ItemHandle, PreviewTabsSettings}; -use workspace::notifications::{DetachAndPromptErr, NotificationId}; +use 
workspace::notifications::{DetachAndPromptErr, NotificationId, NotifyTaskExt}; use workspace::{ searchable::SearchEvent, ItemNavHistory, SplitDirection, ViewId, Workspace, WorkspaceId, }; -use workspace::{OpenInTerminal, OpenTerminal, TabBarSettings, Toast}; +use workspace::{Item as WorkspaceItem, OpenInTerminal, OpenTerminal, TabBarSettings, Toast}; use crate::hover_links::find_url; use crate::signature_help::{SignatureHelpHiddenBy, SignatureHelpState}; -pub const FILE_HEADER_HEIGHT: u32 = 1; +pub const FILE_HEADER_HEIGHT: u32 = 2; pub const MULTI_BUFFER_EXCERPT_HEADER_HEIGHT: u32 = 1; pub const MULTI_BUFFER_EXCERPT_FOOTER_HEIGHT: u32 = 1; pub const DEFAULT_MULTIBUFFER_CONTEXT: u32 = 2; @@ -370,12 +378,20 @@ pub enum EditorMode { Full, } -#[derive(Clone, Debug)] +#[derive(Copy, Clone, Debug)] pub enum SoftWrap { + /// Prefer not to wrap at all. + /// + /// Note: this is currently internal, as actually limited by [`crate::MAX_LINE_LEN`] until it wraps. + /// The mode is used inside git diff hunks, where it's seems currently more useful to not wrap as much as possible. + GitDiff, + /// Prefer a single line generally, unless an overly long line is encountered. None, - PreferLine, + /// Soft wrap lines that exceed the editor width. EditorWidth, + /// Soft wrap lines at the preferred line length. Column(u32), + /// Soft wrap line at the preferred line length or the editor width (whichever is smaller). Bounded(u32), } @@ -411,8 +427,37 @@ impl Default for EditorStyle { } } +pub fn make_inlay_hints_style(cx: &WindowContext) -> HighlightStyle { + let show_background = all_language_settings(None, cx) + .language(None) + .inlay_hints + .show_background; + + HighlightStyle { + color: Some(cx.theme().status().hint), + background_color: show_background.then(|| cx.theme().status().hint_background), + ..HighlightStyle::default() + } +} + type CompletionId = usize; +#[derive(Clone, Debug)] +struct CompletionState { + // render_inlay_ids represents the inlay hints that are inserted + // for rendering the inline completions. They may be discontinuous + // in the event that the completion provider returns some intersection + // with the existing content. + render_inlay_ids: Vec, + // text is the resulting rope that is inserted when the user accepts a completion. + text: Rope, + // position is the position of the cursor when the completion was triggered. + position: multi_buffer::Anchor, + // delete_range is the range of text that this completion state covers. + // if the completion is accepted, this range should be deleted. 
+ delete_range: Option>, +} + #[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug, Default)] struct EditorActionId(usize); @@ -503,6 +548,7 @@ pub struct Editor { active_diagnostics: Option, soft_wrap_mode_override: Option, project: Option>, + semantics_provider: Option>, completion_provider: Option>, collaboration_hub: Option>, blink_manager: Model, @@ -529,14 +575,15 @@ pub struct Editor { nav_history: Option, context_menu: RwLock>, mouse_context_menu: Option, + hunk_controls_menu_handle: PopoverMenuHandle, completion_tasks: Vec<(CompletionId, Task>)>, signature_help_state: SignatureHelpState, auto_signature_help: Option, find_all_references_task_sources: Vec, next_completion_id: CompletionId, completion_documentation_pre_resolve_debounce: DebouncedDelay, - available_code_actions: Option<(Location, Arc<[CodeAction]>)>, - code_actions_task: Option>, + available_code_actions: Option<(Location, Arc<[AvailableCodeAction]>)>, + code_actions_task: Option>>, document_highlights_task: Option>, linked_editing_range_task: Option>>, linked_edit_ranges: linked_editing_ranges::LinkedEditingRanges, @@ -556,7 +603,8 @@ pub struct Editor { gutter_hovered: bool, hovered_link_state: Option, inline_completion_provider: Option, - active_inline_completion: Option<(Inlay, Option>)>, + code_action_providers: Vec>, + active_inline_completion: Option, // enable_inline_completions is a switch that Vim can use to disable // inline completions based on its mode. enable_inline_completions: bool, @@ -592,7 +640,6 @@ pub struct Editor { tasks: BTreeMap<(BufferId, BufferRow), RunnableTasks>, tasks_update_task: Option>, previous_search_ranges: Option]>>, - file_header_size: u32, breadcrumb_header: Option, focused_block: Option, next_scroll_position: NextScrollCursorCenterTopBottom, @@ -626,7 +673,7 @@ pub struct EditorSnapshot { show_git_diff_gutter: Option, show_code_actions: Option, show_runnables: Option, - render_git_blame_gutter: bool, + git_blame_gutter_max_author_length: Option, pub display_snapshot: DisplaySnapshot, pub placeholder_text: Option>, is_focused: bool, @@ -636,7 +683,7 @@ pub struct EditorSnapshot { gutter_hovered: bool, } -const GIT_BLAME_GUTTER_WIDTH_CHARS: f32 = 53.; +const GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED: usize = 20; #[derive(Default, Debug, Clone, Copy)] pub struct GutterDimensions { @@ -776,8 +823,8 @@ impl SelectionHistory { struct RowHighlight { index: usize, - range: RangeInclusive, - color: Option, + range: Range, + color: Hsla, should_autoscroll: bool, } @@ -839,12 +886,12 @@ enum ContextMenu { impl ContextMenu { fn select_first( &mut self, - project: Option<&Model>, + provider: Option<&dyn CompletionProvider>, cx: &mut ViewContext, ) -> bool { if self.visible() { match self { - ContextMenu::Completions(menu) => menu.select_first(project, cx), + ContextMenu::Completions(menu) => menu.select_first(provider, cx), ContextMenu::CodeActions(menu) => menu.select_first(cx), } true @@ -855,12 +902,12 @@ impl ContextMenu { fn select_prev( &mut self, - project: Option<&Model>, + provider: Option<&dyn CompletionProvider>, cx: &mut ViewContext, ) -> bool { if self.visible() { match self { - ContextMenu::Completions(menu) => menu.select_prev(project, cx), + ContextMenu::Completions(menu) => menu.select_prev(provider, cx), ContextMenu::CodeActions(menu) => menu.select_prev(cx), } true @@ -871,12 +918,12 @@ impl ContextMenu { fn select_next( &mut self, - project: Option<&Model>, + provider: Option<&dyn CompletionProvider>, cx: &mut ViewContext, ) -> bool { if self.visible() { match self { - 
ContextMenu::Completions(menu) => menu.select_next(project, cx), + ContextMenu::Completions(menu) => menu.select_next(provider, cx), ContextMenu::CodeActions(menu) => menu.select_next(cx), } true @@ -887,12 +934,12 @@ impl ContextMenu { fn select_last( &mut self, - project: Option<&Model>, + provider: Option<&dyn CompletionProvider>, cx: &mut ViewContext, ) -> bool { if self.visible() { match self { - ContextMenu::Completions(menu) => menu.select_last(project, cx), + ContextMenu::Completions(menu) => menu.select_last(provider, cx), ContextMenu::CodeActions(menu) => menu.select_last(cx), } true @@ -946,39 +993,55 @@ struct CompletionsMenu { } impl CompletionsMenu { - fn select_first(&mut self, project: Option<&Model>, cx: &mut ViewContext) { + fn select_first( + &mut self, + provider: Option<&dyn CompletionProvider>, + cx: &mut ViewContext, + ) { self.selected_item = 0; self.scroll_handle.scroll_to_item(self.selected_item); - self.attempt_resolve_selected_completion_documentation(project, cx); + self.attempt_resolve_selected_completion_documentation(provider, cx); cx.notify(); } - fn select_prev(&mut self, project: Option<&Model>, cx: &mut ViewContext) { + fn select_prev( + &mut self, + provider: Option<&dyn CompletionProvider>, + cx: &mut ViewContext, + ) { if self.selected_item > 0 { self.selected_item -= 1; } else { self.selected_item = self.matches.len() - 1; } self.scroll_handle.scroll_to_item(self.selected_item); - self.attempt_resolve_selected_completion_documentation(project, cx); + self.attempt_resolve_selected_completion_documentation(provider, cx); cx.notify(); } - fn select_next(&mut self, project: Option<&Model>, cx: &mut ViewContext) { + fn select_next( + &mut self, + provider: Option<&dyn CompletionProvider>, + cx: &mut ViewContext, + ) { if self.selected_item + 1 < self.matches.len() { self.selected_item += 1; } else { self.selected_item = 0; } self.scroll_handle.scroll_to_item(self.selected_item); - self.attempt_resolve_selected_completion_documentation(project, cx); + self.attempt_resolve_selected_completion_documentation(provider, cx); cx.notify(); } - fn select_last(&mut self, project: Option<&Model>, cx: &mut ViewContext) { + fn select_last( + &mut self, + provider: Option<&dyn CompletionProvider>, + cx: &mut ViewContext, + ) { self.selected_item = self.matches.len() - 1; self.scroll_handle.scroll_to_item(self.selected_item); - self.attempt_resolve_selected_completion_documentation(project, cx); + self.attempt_resolve_selected_completion_documentation(provider, cx); cx.notify(); } @@ -1014,7 +1077,7 @@ impl CompletionsMenu { fn attempt_resolve_selected_completion_documentation( &mut self, - project: Option<&Model>, + provider: Option<&dyn CompletionProvider>, cx: &mut ViewContext, ) { let settings = EditorSettings::get_global(cx); @@ -1023,18 +1086,16 @@ impl CompletionsMenu { } let completion_index = self.matches[self.selected_item].candidate_id; - let Some(project) = project else { + let Some(provider) = provider else { return; }; - let resolve_task = project.update(cx, |project, cx| { - project.resolve_completions( - self.buffer.clone(), - vec![completion_index], - self.completions.clone(), - cx, - ) - }); + let resolve_task = provider.resolve_completions( + self.buffer.clone(), + vec![completion_index], + self.completions.clone(), + cx, + ); let delay_ms = EditorSettings::get_global(cx).completion_documentation_secondary_query_debounce; @@ -1183,6 +1244,10 @@ impl CompletionsMenu { None }; + let color_swatch = completion + .color() + .map(|color| 
div().size_4().bg(color).rounded_sm()); + div().min_w(px(220.)).max_w(px(540.)).child( ListItem::new(mat.candidate_id) .inset(true) @@ -1198,6 +1263,7 @@ impl CompletionsMenu { task.detach_and_log_err(cx) } })) + .start_slot::
(color_swatch) .child(h_flex().overflow_hidden().child(completion_label)) .end_slot::
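The `editor.rs` hunks shown here also rework the completions menu so its selection methods take an `Option<&dyn CompletionProvider>` instead of an optional project handle, and documentation resolution goes through that trait. Below is a minimal, self-contained sketch of this dependency-inversion pattern; the types and the single-argument `resolve_completions` signature are simplified, hypothetical stand-ins rather than Zed's actual definitions (the real method also receives the buffer, the completion list, and a view context, as the diff shows).

```rust
// Hypothetical, simplified stand-in for the provider trait; in the diff, the
// real implementation resolves completion documentation via the project.
trait CompletionProvider {
    fn resolve_completions(&self, indices: Vec<usize>) -> bool;
}

struct ProjectBacked;

impl CompletionProvider for ProjectBacked {
    fn resolve_completions(&self, indices: Vec<usize>) -> bool {
        // A real provider would fetch documentation for these candidates.
        !indices.is_empty()
    }
}

struct CompletionsMenu {
    selected_item: usize,
    match_count: usize,
}

impl CompletionsMenu {
    // Selection methods accept any provider (or none), mirroring the
    // `Option<&dyn CompletionProvider>` parameter introduced in the diff.
    fn select_next(&mut self, provider: Option<&dyn CompletionProvider>) {
        self.selected_item = (self.selected_item + 1) % self.match_count;
        if let Some(provider) = provider {
            provider.resolve_completions(vec![self.selected_item]);
        }
    }
}

fn main() {
    let mut menu = CompletionsMenu {
        selected_item: 0,
        match_count: 3,
    };
    let provider = ProjectBacked;
    menu.select_next(Some(&provider));
    menu.select_next(None); // still works without a provider
    assert_eq!(menu.selected_item, 2);
}
```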
-To setup your current python to have an available kernel, run: +To set up your current Python to have an available kernel, run: ```sh pip install ipykernel python -m ipykernel install --user --name myenv --display-name "Python (myenv)" ``` @@ -99,7 +99,7 @@ python -m ipykernel install --user --name myenv --display-name "Python (myenv)" ### R (Ark Kernel) {#r-ark} -Install [Ark](https://github.com/posit-dev/ark/releases) by downloading the release for your operating system. E.g. for macOS just unpack `ark` binary and put it into `/usr/local/bin`. Then run: +Install [Ark](https://github.com/posit-dev/ark/releases) by downloading the release for your operating system. For example, for macOS, just unpack the `ark` binary and put it into `/usr/local/bin`. Then run: ```sh ark --install ``` @@ -137,9 +137,9 @@ TBD: Improve Julia REPL instructions ### Scala -- Install Scala with `cs setup` (Coursier): https://www.scala-lang.org/download/ +- [Install Scala](https://www.scala-lang.org/download/) with `cs setup` (Coursier): - `brew install coursier/formulas/coursier && cs setup` -- REPL (Almond) Setup Instructions https://almond.sh/docs/quick-start-install +- REPL (Almond) [setup instructions](https://almond.sh/docs/quick-start-install): - `brew install --cask temurin` (Eclipse foundation official OpenJDK binaries) - `brew install coursier/formulas/coursier && cs setup` - `coursier launch --use-bootstrap almond -- --install` @@ -180,4 +180,4 @@ Available kernels: rust /Users/z/Library/Jupyter/kernels/rust ``` -Note: Zed makes best effort usage of `sys.prefix` and `CONDA_PREFIX` to find kernels in Python environments. If you want explicitly control run `python -m ipykernel install --user --name myenv --display-name "Python (myenv)"` to install the kernel directly while in the environment. +> Note: Zed makes best-effort use of `sys.prefix` and `CONDA_PREFIX` to find kernels in Python environments. If you want explicit control, run `python -m ipykernel install --user --name myenv --display-name "Python (myenv)"` to install the kernel directly while in the environment. diff --git a/docs/src/telemetry.md b/docs/src/telemetry.md index cdb44979eade0f..dd6556c24f0b9a 100644 --- a/docs/src/telemetry.md +++ b/docs/src/telemetry.md @@ -31,7 +31,7 @@ Telemetry is sent from the application to our servers. Data is proxied through o Diagnostic events include debug information (stack traces) from crash reports. Reports are sent on the first application launch after the crash occurred. We've built dashboards that allow us to visualize the frequency and severity of issues experienced by users. Having these reports sent automatically allows us to begin implementing fixes without the user needing to file a report in our issue tracker. The plots in the dashboards also give us an informal measurement of the stability of Zed. -You can see what data is sent when a panic occurs by inspecting the `Panic` struct in [crates/telemetry_events/src/telemetry_events.rs](https://github.com/zed-industries/zed/blob/main/crates/telemetry_events/src/telemetry_events.rs#L184) in the zed repo. You can find additional information in the [Debugging Crashes](./development/debugging-crashes.md) documentation. +You can see what data is sent when a panic occurs by inspecting the `Panic` struct in [crates/telemetry_events/src/telemetry_events.rs](https://github.com/zed-industries/zed/blob/main/crates/telemetry_events/src/telemetry_events.rs#L184) in the Zed repo. You can find additional information in the [Debugging Crashes](./development/debugging-crashes.md) documentation.
### Usage Data (Metrics) {#metrics} @@ -48,8 +48,8 @@ Usage Data is associated with a secure random telemetry ID which may be linked t You can audit the metrics data that Zed has reported by running the command {#action zed::OpenTelemetryLog} from the command palette, or clicking `Help > View Telemetry Log` in the application menu. -You can see the full list of the event types and exactly the data sent for each by inspecting the `Event` enum and the associated structs in [crates/telemetry_events/src/telemetry_events.rs](https://github.com/zed-industries/zed/blob/main/crates/telemetry_events/src/telemetry_events.rs#L63] in the zed repo. +You can see the full list of the event types and exactly the data sent for each by inspecting the `Event` enum and the associated structs in [crates/telemetry_events/src/telemetry_events.rs](https://github.com/zed-industries/zed/blob/main/crates/telemetry_events/src/telemetry_events.rs#L63) in the Zed repository. ## Concerns and Questions -If you have concerns about telemetry, please feel free to open issues in our [Zed repository](https://github.com/zed-industries/zed/issues/new/choose). +If you have concerns about telemetry, please feel free to [open an issue](https://github.com/zed-industries/zed/issues/new/choose). diff --git a/docs/src/vim.md b/docs/src/vim.md index d4e41b58199570..8bfa6aa73f6121 100644 --- a/docs/src/vim.md +++ b/docs/src/vim.md @@ -1,149 +1,387 @@ # Vim Mode -Zed includes a Vim emulation layer known as "vim mode". On this page, you will learn how to turn Zed's vim mode on or off, what tools and commands are available, and how to customize keybindings. +Zed includes a Vim emulation layer known as "vim mode". On this page, you will learn how to turn Zed's vim mode on or off, what tools and commands Zed provides to help you navigate and edit your code, and generally how to make the most of vim mode in Zed. -## Philosophy +You'll learn how to: -Vim mode tries to offer a familiar experience to Vim users: it replicates the behavior of motions and commands precisely when it makes sense and uses Zed-specific functionality to provide an editing experience that "just works" without requiring configuration on your part. This includes support for semantic navigation, multiple cursors, or other features usually provided by plugins like surrounding text. +- Understand the core differences between Zed's vim mode and traditional Vim +- Enable or disable vim mode +- Make the most of Zed-specific features within vim mode +- Customize vim mode key bindings +- Configure vim mode settings + +Whether you're new to vim mode or an experienced Vim user looking to optimize your Zed experience, this guide will help you harness the full power of modal editing in Zed. + +## Zed's vim mode design + +Vim mode tries to offer a familiar experience to Vim users: it replicates the behavior of motions and commands precisely when it makes sense and uses Zed-specific functionality to provide an editing experience that "just works" without requiring configuration on your part. + +This includes support for semantic navigation, multiple cursors, or other features usually provided by plugins like surrounding text. So, Zed's vim mode does not replicate Vim one-to-one, but it meshes Vim's modal design with Zed's modern features to provide a more fluid experience. It's also configurable, so you can add your own key bindings or override the defaults. -> **Note:** The foundations of Zed's vim mode should already cover many use cases, and we're always looking to improve it. 
If you find missing features that you rely on in your workflow, please [file an issue](https://github.com/zed-industries/zed/issues). +### Core differences + +There are four types of features in vim mode that use Zed's core functionality, leading to some differences in behavior: + +1. **Motions**: vim mode uses Zed's semantic parsing to tune the behavior of motions per language. For example, in Rust, jumping to matching bracket with `%` works with the pipe character `|`. In JavaScript, `w` considers `$` to be a word character. +2. **Visual block selections**: vim mode uses Zed's multiple cursor to emulate visual block selections, making block selections a lot more flexible. For example, anything you insert after a block selection updates on every line in real-time, and you can add or remove cursors anytime. +3. **Macros**: vim mode uses Zed's recording system for vim macros. So, you can capture and replay more complex actions, like autocompletion. +4. **Search and replace**: vim mode uses Zed's search system, so, the syntax for regular expressions is slightly different compared to Vim. [Head to the Regex differences section](#regex-differences) for details. + +> **Note:** The foundations of Zed's vim mode should already cover many use cases, and we're always looking to improve it. If you find missing features that you rely on in your workflow, please [file an issue on GitHub](https://github.com/zed-industries/zed/issues). ## Enabling and disabling vim mode -When you first open Zed, a checkbox will appear on the welcome screen, allowing you to enable vim mode. +When you first open Zed, you'll see a checkbox on the welcome screen that allows you to enable vim mode. If you missed this, you can toggle vim mode on or off anytime by opening the command palette and using the workspace command `toggle vim mode`. +> **Note**: This command toggles the following property in your user settings: +> +> ```json +> { +> "vim_mode": true +> } +> ``` + ## Zed-specific features Zed is built on a modern foundation that (among other things) uses tree-sitter and language servers to understand the content of the file you're editing and supports multiple cursors out of the box. Vim mode has several "core Zed" key bindings that will help you make the most of Zed's specific feature set. +### Language server + +The following commands use the language server to help you navigate and refactor your code. + +| Command | Default Shortcut | +| ---------------------------------------- | ---------------- | +| Go to definition | `g d` | +| Go to declaration | `g D` | +| Go to type definition | `g y` | +| Go to implementation | `g I` | +| Rename (change definition) | `c d` | +| Go to All references to the current word | `g A` | +| Find symbol in current file | `g s` | +| Find symbol in entire project | `g S` | +| Go to next diagnostic | `g ]` or `] d` | +| Go to previous diagnostic | `g [` or `[ d` | +| Show inline error (hover) | `g h` | +| Open the code actions menu | `g .` | + +### Git + +| Command | Default Shortcut | +| ------------------------- | ---------------- | +| Go to next git change | `] c` | +| Go to previous git change | `[ c` | + +### Treesitter + +Treesitter is a powerful tool that Zed uses to understand the structure of your code. These commands help you navigate your code semantically. 
+ +| Command | Default Shortcut | +| ---------------------------- | ---------------- | +| Select a smaller syntax node | `] x` | +| Select a larger syntax node | `[ x` | + +### Multi cursor + +These commands help you manage multiple cursors in Zed. + +| Command | Default Shortcut | +| ------------------------------------------------------------ | ---------------- | +| Add a cursor selecting the next copy of the current word | `g l` | +| Add a cursor selecting the previous copy of the current word | `g L` | +| Skip latest word selection, and add next | `g >` | +| Skip latest word selection, and add previous | `g <` | +| Add a visual selection for every copy of the current word | `g a` | + +### Pane management + +These commands open new panes or jump to specific panes. + +| Command | Default Shortcut | +| ------------------------------------------ | ------------------ | +| Open a project-wide search | `g /` | +| Open the current search excerpt | `g ` | +| Open the current search excerpt in a split | ` ` | +| Go to definition in a split | ` g d` | +| Go to type definition in a split | ` g D` | + +### In insert mode + +The following commands help you bring up Zed's completion menu, request a suggestion from GitHub Copilot, or open the inline AI assistant without leaving insert mode. + +| Command | Default Shortcut | +| ---------------------------------------------------------------------------- | ---------------- | +| Open the completion menu | `ctrl-x ctrl-o` | +| Request GitHub Copilot suggestion (requires GitHub Copilot to be configured) | `ctrl-x ctrl-c` | +| Open the inline AI assistant (requires a configured assistant) | `ctrl-x ctrl-a` | +| Open the code actions menu | `ctrl-x ctrl-l` | +| Hides all suggestions | `ctrl-x ctrl-z` | + +### Supported plugins + +Zed's vim mode includes some features that are usually provided by very popular plugins in the Vim ecosystem: + +- You can surround text objects with `ys` (yank surround), change surrounding with `cs`, and delete surrounding with `ds`. +- You can comment and uncomment selections with `gc` in visual mode and `gcc` in normal mode. +- The project panel supports many shortcuts modeled after the Vim plugin `netrw`: navigation with `hjkl`, open file with `o`, open file in a new tab with `t`, etc. +- You can add key bindings to your keymap to navigate "camelCase" names. [Head down to the Optional key bindings](#optional-key-bindings) section to learn how. + +## Command palette + +Vim mode allows you to open Zed's command palette with `:`. You can then type to access any usual Zed command. Additionally, vim mode adds aliases for popular Vim commands to ensure your muscle memory transfers to Zed. For example, you can write `:w` or `:write` to save the file. + +Below, you'll find tables listing the commands you can use in the command palette. We put optional characters in square brackets to indicate that you can omit them. + +> **Note**: We don't emulate the full power of Vim's command line yet. In particular, commands currently do not support arguments. Please [file issues on GitHub](https://github.com/zed-industries/zed) as you find things that are missing from the command palette. + +### File and window management + +This table shows commands for managing windows, tabs, and panes. As commands don't support arguments currently, you cannot specify a filename when saving or creating a new file. 
+ +| Command | Description | +| -------------- | ---------------------------------------------------- | +| `:w[rite][!]` | Save the current file | +| `:wq[!]` | Save the file and close the buffer | +| `:q[uit][!]` | Close the buffer | +| `:wa[ll][!]` | Save all open files | +| `:wqa[ll][!]` | Save all open files and close all buffers | +| `:qa[ll][!]` | Close all buffers | +| `:[e]x[it][!]` | Close the buffer | +| `:up[date]` | Save the current file | +| `:cq` | Quit completely (close all running instances of Zed) | +| `:vs[plit]` | Split the pane vertically | +| `:sp[lit]` | Split the pane horizontally | +| `:new` | Create a new file in a horizontal split | +| `:vne[w]` | Create a new file in a vertical split | +| `:tabedit` | Create a new file in a new tab | +| `:tabnew` | Create a new file in a new tab | +| `:tabn[ext]` | Go to the next tab | +| `:tabp[rev]` | Go to previous tab | +| `:tabc[lose]` | Close the current tab | + +> **Note:** The `!` character is used to force the command to execute without saving changes or prompting before overwriting a file. + +### Ex commands + +These ex commands open Zed's various panels and windows. + +| Command | Default Shortcut | +| ---------------------------- | ---------------- | +| Open the project panel | `:E[xplore]` | +| Open the collaboration panel | `:C[ollab]` | +| Open the chat panel | `:Ch[at]` | +| Open the AI panel | `:A[I]` | +| Open the notifications panel | `:No[tif]` | +| Open the feedback window | `:fe[edback]` | +| Open the diagnostics window | `:cl[ist]` | +| Open the terminal | `:te[rm]` | +| Open the extensions window | `:Ext[ensions]` | + +### Navigating diagnostics + +These commands navigate diagnostics. + +| Command | Description | +| ------------------------ | ------------------------------ | +| `:cn[ext]` or `:ln[ext]` | Go to the next diagnostic | +| `:cp[rev]` or `:lp[rev]` | Go to the previous diagnostics | +| `:cc` or `:ll` | Open the errors page | + +### Git + +These commands interact with the version control system git. + +| Command | Description | +| --------------- | ------------------------------------------------------- | +| `:dif[fupdate]` | View the diff under the cursor (`d o` in normal mode) | +| `:rev[ert]` | Revert the diff under the cursor (`d p` in normal mode) | + +### Jump + +These commands jump to specific positions in the file. + +| Command | Description | +| ------------------- | ----------------------------------- | +| `:` | Jump to a line number | +| `:$` | Jump to the end of the file | +| `:/foo` and `:?foo` | Jump to next/prev line matching foo | + +### Replacement + +This command replaces text. It emulates the substitute command in vim. The substitute command uses regular expressions, and Zed uses a slightly different syntax than vim. You can learn more about Zed's syntax below, [in the regex differences section](#regex-differences). Also, by default, Zed always replaces all occurrences of the search pattern in the current line. + +| Command | Description | +| -------------------- | --------------------------------- | +| `:[range]s/foo/bar/` | Replace instances of foo with bar | + +### Editing + +These commands help you edit text. 
+ +| Command | Description | +| ----------------- | ------------------------------------------------------- | +| `:j[oin]` | Join the current line | +| `:d[elete][l][p]` | Delete the current line | +| `:s[ort] [i]` | Sort the current selection (with i, case-insensitively) | +| `:y[ank]` | Yank (copy) the current selection or line | + +### Command mnemonics + +As any Zed command is available, you may find that it's helpful to remember mnemonics that run the correct command. For example: + +- `:diffs` for "toggle all hunk diffs" +- `:cpp` for "copy path to file" +- `:crp` for "copy relative path" +- `:reveal` for "reveal in finder" +- `:zlog` for "open zed log" +- `:clank` for "cancel language server work" + +## Customizing key bindings + +In this section, we'll learn how to customize the key bindings of Zed's vim mode. You'll learn: + +- How to select the correct context for your new key bindings. +- Useful contexts for vim mode key bindings. +- Common key bindings to customize for extra productivity. + +### Selecting the correct context + +Zed's key bindings are evaluated only when the `"context"` property matches your location in the editor. For example, if you add key bindings to the `"Editor"` context, they will only work when you're editing a file. If you add key bindings to the `"Workspace"` context, they will work everywhere in Zed. Here's an example of a key binding that saves when you're editing a file: + +```json +{ + "context": "Editor", + "bindings": { + "ctrl-s": "file::Save" + } +} ``` -# Language server -g d Go to definition -g D Go to declaration -g y Go to type definition -g I Go to implementation - -c d Rename (change definition) -g A Go to All references to the current word - -g s Find symbol in current file -g S Find symbol in entire project - -g ] Go to next diagnostic -g [ Go to previous diagnostic -] d Go to next diagnostic -[ d Go to previous diagnostic -g h Show inline error (hover) -g . Open the code actions menu - -# Git -] c Go to next git change -[ c Go to previous git change - -# Treesitter -] x Select a smaller syntax node -[ x Select a larger syntax node - -# Multi cursor -g l Add a visual selection for the next copy of the current word -g L The same, but backwards -g > Skip latest word selection, and add next. -g < The same, but backwards -g a Add a visual selection for every copy of the current word - -# Pane management -g / Open a project-wide search -g Open the current search excerpt - Open the current search excerpt in a split - g d Go to definition in a split - g D Go to type definition in a split - -# Insert mode -ctrl-x ctrl-o Open the completion menu -ctrl-x ctrl-c Request GitHub Copilot suggestion (if configured) -ctrl-x ctrl-a Open the inline AI assistant (if configured) -ctrl-x ctrl-l Open the code actions menu -ctrl-x ctrl-z Hides all suggestions - -# Ex commands -:E[xplore] Open the project panel -:C[ollab] Open the collaboration panel -:Ch[at] Open the chat panel -:A[I] Open the AI panel -:No[tif] Open the notifications panel -:fe[edback] Open the feedback window -:cl[ist] Open the diagnostics window -:te[rm] Open the terminal -:Ext[ensions] Open the extensions window + +Contexts are nested, so when you're editing a file, the context is the `"Editor"` context, which is inside the `"Pane"` context, which is inside the `"Workspace"` context. That's why any key bindings you add to the `"Workspace"` context will work when you're editing a file. Here's an example: + +```json +// This key binding will work when you're editing a file. 
It comes built into Zed by default as the workspace: save command. +{ + "context": "Workspace", + "bindings": { + "ctrl-s": "file::Save" + } +} ``` -Vim mode uses Zed to define concepts like "brackets" (for the `%` key) and "words" (for motions like `w` and `e`). This does lead to some differences, but they are mostly positive. For example `%` considers `|` to be a bracket in languages like Rust; and `w` considers `$` to be a word-character in languages like Javascript. +Contexts are expressions. They support boolean operators like `&&` (and) and `||` (or). For example, you can use the context `"Editor && vim_mode == normal"` to create key bindings that only work when you're editing a file _and_ you're in vim's normal mode. -Vim mode emulates visual block mode using Zed's multiple cursor support. This again leads to some differences, but is much more powerful. +Vim mode adds several contexts to the `"Editor"` context: -Vim's macro support (`q` and `@`) is implemented using Zed's actions. This lets us support recording and replaying of autocompleted code, etc. Unlike Vim, Zed does not re-use the yank registers for recording macros, they are two separate namespaces. +| Operator | Description | +| -------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| VimControl | Indicates that vim keybindings should work. Currently an alias for `vim_mode == normal \|\| vim_mode == visual \|\| vim_mode == operator`, but the definition may change over time | +| vim_mode == normal | Normal mode | +| vim_mode == visual | Visual mode | +| vim_mode == insert | Insert mode | +| vim_mode == replace | Replace mode | +| vim_mode == waiting | Waiting for an arbitrary key (e.g., after typing `f` or `t`) | +| vim_mode == operator | Waiting for another binding to trigger (e.g., after typing `c` or `d`) | +| vim_operator | Set to `none` unless `vim_mode == operator`, in which case it is set to the current operator's default keybinding (e.g., after typing `d`, `vim_operator == d`) | -Finally, vim mode's search and replace functionality is backed by Zed's. This means that the pattern syntax is slightly different, see the section on [Regex differences](#regex-differences) for details. +> **Note**: Contexts are matched only on one level at a time. So it is possible to use the expression `"Editor && vim_mode == normal"`, but `"Workspace && vim_mode == normal"` will never match because we set the vim context at the `"Editor"` level. -## Custom key bindings +### Useful contexts for vim mode key bindings -You can edit your personal key bindings with `:keymap`. -For vim-specific shortcuts, you may find the following template a good place to start. +Here's a template with useful vim mode contexts to help you customize your vim mode key bindings. You can copy it and integrate it into your user keymap. ```json [ { "context": "VimControl && !menu", "bindings": { - // put key-bindings here if you want them to work in normal & visual mode + // Put key bindings here if you want them to work in normal & visual mode. } }, { "context": "vim_mode == normal && !menu", "bindings": { - // "shift-y": ["workspace::SendKeystrokes", "y $"] // use nvim's Y behavior + // "shift-y": ["workspace::SendKeystrokes", "y $"] // Use neovim's yank behavior: yank to end of line. } }, { "context": "vim_mode == insert", "bindings": { - // "j k": "vim::NormalBefore" // remap jk in insert mode to escape. 
+ // "j k": "vim::NormalBefore" // In insert mode, make jk escape to normal mode. } }, { "context": "EmptyPane || SharedScreen", "bindings": { - // put key-bindings here (in addition to above) if you want them to - // work when no editor exists + // Put key bindings here (in addition to the context above) if you want them to + // work when no editor exists. // "space f": "file_finder::Toggle" } } ] ``` -If you would like to emulate vim's `map` (`nmap` etc.) commands you can bind to the [`workspace::SendKeystrokes`](./key-bindings.md#remapping-keys) action in the correct context. +> **Note**: If you would like to emulate Vim's `map` commands (`nmap`, etc.), you can use the action `workspace::SendKeystrokes` in the correct context. -You can see the bindings that are enabled by default in vim mode [here](https://github.com/zed-industries/zed/blob/main/assets/keymaps/vim.json). +### Optional key bindings -### Contexts +By default, you can navigate between the different files open in the editor with shortcuts like `ctrl+w` followed by one of `hjkl` to move to the left, down, up, or right, respectively. -Zed's keyboard bindings are evaluated only when the `"context"` matches the location you are in on the screen. Locations are nested, so when you're editing you're in the `"Workspace"` location is at the top, containing a `"Pane"` which contains an `"Editor"`. Contexts are matched only on one level at a time. So it is possible to combine `Editor && vim_mode == normal`, but `Workspace && vim_mode == normal` will never match because we set the vim context at the `Editor` level. +But you cannot use the same shortcuts to move between all the editor docks (the terminal, project panel, assistant panel, ...). If you want to use the same shortcuts to navigate to the docks, you can add the following key bindings to your user keymap. + +```json +{ + "context": "Dock", + "bindings": { + "ctrl-w h": ["workspace::ActivatePaneInDirection", "Left"], + "ctrl-w l": ["workspace::ActivatePaneInDirection", "Right"], + "ctrl-w k": ["workspace::ActivatePaneInDirection", "Up"], + "ctrl-w j": ["workspace::ActivatePaneInDirection", "Down"] + // ... or other keybindings + } +} +``` + +Subword motion, which allows you to navigate and select individual words in camelCase or snake_case, is not enabled by default. To enable it, add these bindings to your keymap. + +```json +[ + { + "context": "VimControl && !menu && vim_mode != operator", + "bindings": { + "w": "vim::NextSubwordStart", + "b": "vim::PreviousSubwordStart", + "e": "vim::NextSubwordEnd", + "g e": "vim::PreviousSubwordEnd" + } + } +] +``` -Vim mode adds several contexts to the `Editor`: +Vim mode comes with shortcuts to surround the selection in normal mode (`ys`), but it doesn't have a shortcut to add surrounds in visual mode. By default, `shift-s` substitutes the selection (erases the text and enters insert mode). To use `shift-s` to add surrounds in visual mode, you can add the following object to your keymap. -- `vim_mode` is similar to, but not identical to, the current mode. It starts as one of `normal`, `visual`, `insert` or `replace` (depending on your mode). If you are mid-way through typing a sequence, `vim_mode` will be either `waiting` if it's waiting for an arbitrary key (for example after typing `f` or `t`), or `operator` if it's waiting for another binding to trigger (for example after typing `c` or `d`). 
-- `vim_operator` is set to `none` unless `vim_mode == operator` in which case it is set to the current operator's default keybinding (for example after typing `d`, `vim_operator == d`). -- `"VimControl"` indicates that vim keybindings should work. It is currently an alias for `vim_mode == normal || vim_mode == visual || vim_mode == operator`, but the definition may change over time. +```json +{ + "context": "vim_mode == visual", + "bindings": { + "shift-s": [ + "vim::PushOperator", + { + "AddSurrounds": {} + } + ] + } +} +``` ### Restoring common text editing keybindings -If you're using vim mode on Linux or Windows, you may find it overrides keybindings you can't live without: Ctrl+v to copy, Ctrl+f to search, etc. You can restore them by copying this data into your keymap: +If you're using vim mode on Linux or Windows, you may find it overrides keybindings you can't live without: `ctrl+v` to copy, `ctrl+f` to search, etc. You can restore them by copying this data into your keymap: ```json { @@ -160,109 +398,39 @@ If you're using vim mode on Linux or Windows, you may find it overrides keybindi }, ``` -## Command palette - -Vim mode allows you to enable Zed’s command palette with `:`. This means that you can use vim's command palette to run any action that Zed supports. - -Additionally, vim mode contains a number of aliases for popular vim commands to ensure that muscle memory works. For example `:w` will save the file. - -We do not (yet) emulate the full power of vim’s command line, in particular, we do not support arguments to commands yet. Please reach out on [GitHub](https://github.com/zed-industries/zed) as you find things that are missing from the command palette. - -As mentioned above, one thing to be aware of is that the regex engine is slightly different from vim's in `:%s/a/b`. - -Currently supported vim-specific commands: +## Changing vim mode settings -``` -# window management -:w[rite][!], :wq[!], :q[uit][!], :wa[ll][!], :wqa[ll][!], :qa[ll][!], :[e]x[it][!], :up[date] - to save/close tab(s) and pane(s) (no filename is supported yet) -:cq - to quit completely. -:vs[plit], :sp[lit] - to split vertically/horizontally (no filename is supported yet) -:new, :vne[w] - to create a new file in a new pane above or to the left -:tabedit, :tabnew - to create a new file in a new tab. -:tabn[ext], :tabp[rev] - to go to previous/next tabs -:tabc[lose] - to close the current tab - -# navigating diagnostics -:cn[ext], :cp[rev], :ln[ext], :lp[rev] - to go to the next/prev diagnostics -:cc, :ll - to open the errors page - -# handling git diff -:dif[fupdate] - to view the diff under the cursor ("d o" in normal mode) -:rev[ert] - to revert the diff under the cursor ("d p" in normal mode) - -# jump to position -: - to jump to a line number -:$ - to jump to the end of the file -:/foo and :?foo - to jump to next/prev line matching foo - -# replacement (/g is always assumed and Zed uses different regex syntax to vim) -:[range]s/foo/bar/ - to replace instances of foo with bar - -# editing -:j[oin] - to join the current line (no range is yet supported) -:d[elete][l][p] - to delete the current line (no range is yet supported) -:s[ort] [i] - to sort the current selection (with i, case-insensitively) -:y[ank] -``` - -As any Zed command is available, you may find that it's helpful to remember mnemonics that run the correct command. 
For example: +You can change the following settings to modify vim mode's behavior: -``` -:diffs Toggle all Hunk [Diffs] -:cpp [C]o[p]y [P]ath to file -:crp [C]opy [r]elative [P]ath -:reveal [Reveal] in finder -:zlog Open [Z]ed Log -:clank [C]ancel [lan]guage server work[k] -``` +| Property | Description | Default Value | +| ---------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------- | +| use_system_clipboard | Determines how system clipboard is used:
  • "always": use for all operations
  • "never": only use when explicitly specified
  • "on_yank": use for yank operations
| "always" | +| use_multiline_find | If `true`, `f` and `t` motions extend across multiple lines. | false | +| use_smartcase_find | If `true`, `f` and `t` motions are case-insensitive when the target letter is lowercase. | false | +| toggle_relative_line_numbers | If `true`, line numbers are relative in normal mode and absolute in insert mode, giving you the best of both options. | false | +| custom_digraphs | An object that allows you to add custom digraphs. Read below for an example. | {} | -## Settings - -Vim mode is not enabled by default. To enable vim mode, you need to add the following configuration to your settings file: +Here's an example of adding a digraph for the zombie emoji. This allows you to type `ctrl-k f z` to insert a zombie emoji. You can add as many digraphs as you like. ```json { - "vim_mode": true + "vim": { + "custom_digraphs": { + "fz": "🧟‍♀️" + } + } } ``` -Alternatively, you can enable vim mode by running the `toggle vim mode` command from the command palette. - -Some vim settings are available to modify the default vim behavior: +Here's an example of these settings changed: ```json { "vim": { - // "always": use system clipboard when no register is specified - // "never": don't use system clipboard unless "+ or "* is specified - // "on_yank": use system clipboard for yank operations when no register is specified - "use_system_clipboard": "always", - // Let `f` and `t` motions extend across multiple lines + "use_system_clipboard": "never", "use_multiline_find": true, - // Let `f` and `t` motions match case insensitively if the target is lowercase "use_smartcase_find": true, - // Use relative line numbers in normal mode, absolute in insert mode - // c.f. https://github.com/jeffkreeftmeijer/vim-numbertoggle "toggle_relative_line_numbers": true, - // Add custom digraphs (e.g. ctrl-k f z will insert a zombie emoji) "custom_digraphs": { "fz": "🧟‍♀️" } @@ -270,22 +438,36 @@ Some vim settings are available to modify the default vim behavior: } ``` -There are also a few Zed settings that you may also enjoy if you use vim mode: +## Useful core Zed settings for vim mode + +Here are a few general Zed settings that can help you fine-tune your Vim experience: + +| Property | Description | Default Value | +| ----------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------- | +| cursor_blink | If `true`, the cursor blinks. | `true` | +| relative_line_numbers | If `true`, line numbers in the left gutter are relative to the cursor. | `true` | +| scrollbar | Object that controls the scrollbar display. Set to `{ "show": "never" }` to hide the scroll bar. | `{ "show": "always" }` | +| scroll_beyond_last_line | If set to `"one_page"`, allows scrolling up to one page beyond the last line. Set to `"off"` to prevent this behavior. | `"one_page"` | +| vertical_scroll_margin | The number of lines to keep above or below the cursor when scrolling. Set to `0` to allow the cursor to go up to the edges of the screen vertically. | `3` | +| gutter.line_numbers | Controls the display of line numbers in the gutter. Set the `"line_numbers"` property to `false` to hide line numbers. | `true` | +| command_aliases | Object that defines aliases for commands in the command palette. You can use it to define shortcut names for commands you use often. Read below for examples. 
| `{}` | + +Here's an example of these settings changed: ```json { - // disable cursor blink + // Disable cursor blink "cursor_blink": false, - // use relative line numbers + // Use relative line numbers "relative_line_numbers": true, - // hide the scroll bar + // Hide the scroll bar "scrollbar": { "show": "never" }, - // prevent the buffer from scrolling beyond the last line + // Prevent the buffer from scrolling beyond the last line "scroll_beyond_last_line": "off", - // allow cursor to reach edges of screen + // Allow the cursor to reach the edges of the screen "vertical_scroll_margin": 0, "gutter": { - // disable line numbers completely: + // Disable line numbers completely: "line_numbers": false }, "command_aliases": { @@ -296,74 +478,17 @@ There are also a few Zed settings that you may also enjoy if you use vim mode: } ``` -If you want to navigate between the editor and docks (terminal, project panel, AI assistant, ...) just like you navigate between splits you can use the following key bindings: - -```json -{ - "context": "Dock", - "bindings": { - "ctrl-w h": ["workspace::ActivatePaneInDirection", "Left"], - "ctrl-w l": ["workspace::ActivatePaneInDirection", "Right"], - "ctrl-w k": ["workspace::ActivatePaneInDirection", "Up"], - "ctrl-w j": ["workspace::ActivatePaneInDirection", "Down"] - // ... or other keybindings - } -} -``` - -Subword motion is not enabled by default. To enable it, add these bindings to your keymap. - -```json -[ - { - "context": "VimControl && !menu && vim_mode != operator", - "bindings": { - "w": "vim::NextSubwordStart", - "b": "vim::PreviousSubwordStart", - "e": "vim::NextSubwordEnd", - "g e": "vim::PreviousSubwordEnd" - } - } -] -``` - -Surrounding the selection in visual mode is also not enabled by default (`shift-s` normally behaves like `c`). To enable it, add the following to your keymap. - -```json -{ - "context": "vim_mode == visual", - "bindings": { - "shift-s": [ - "vim::PushOperator", - { - "AddSurrounds": {} - } - ] - } -} -``` - -## Supported plugins - -Zed has nascent support for some Vim plugins: - -- From `vim-surround`, `ys`, `cs` and `ds` work. Though you cannot add new HTML tags yet. -- From `vim-commentary`, `gc` in visual mode and `gcc` in normal mode. Though you cannot operate on arbitrary objects yet. -- From `netrw`, most keybindings are supported in the project panel. -- From `vim-spider`/`CamelCaseMotion` you can use subword motions as described above. +The `command_aliases` property is a single object that maps keys or key sequences to vim mode commands. The example above defines multiple aliases: `W` for `w`, `Wq` for `wq`, and `Q` for `q`. ## Regex differences -Zed uses a different regular expression engine from Vim. This means that you will have to use a different syntax for some things. - -Notably: +Zed uses a different regular expression engine from Vim. This means that you will have to use a different syntax in some cases. Here are the most common differences: -- Vim uses `\(` and `\)` to represent capture groups, in Zed these are `(` and `)`. -- On the flip side, `(` and `)` represent literal parentheses, but in Zed these must be escaped to `\(` and `\)`. -- When replacing, Vim uses `\0` to represent the entire match, in Zed this is `$0`, same for numbered capture groups `\1` -> `$1`. -- Vim uses `/g` to indicate "all matches on one line", in Zed this is implied -- Vim uses `/i` to indicate "case-insensitive", in Zed you can either use `(?i)` at the start of the pattern or toggle case-sensitivity with `cmd-option-c`. 
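As a concrete (and purely illustrative) side-by-side sketch of those differences, the following performs the same case-insensitive swap of two capture groups, first in Vim's syntax and then in Zed's search and replace:

```
" Vim: escaped groups, \N backreferences, explicit /g and /i flags
:%s/\(foo\)\(bar\)/\2\1/gi

# Zed: plain groups, $N backreferences, global by default, (?i) for case-insensitivity
search:  (?i)(foo)(bar)
replace: $2$1
```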
+- **Capture groups**: Vim uses `\(` and `\)` to represent capture groups, in Zed these are `(` and `)`. On the flip side, in Vim, `(` and `)` represent literal parentheses, but in Zed these must be escaped to `\(` and `\)`. +- **Matches**: When replacing, Vim uses the backslash character followed by a number to represent a matched capture group. For example, `\1`. Zed uses the dollar sign instead. So, when in Vim you use `\0` to represent the entire match, in Zed the syntax is `$0` instead. Same for numbered capture groups: `\1` in Vim is `$1` in Zed. +- **Global option**: By default, in Vim, regex searches only match the first occurrence on a line, and you append `/g` at the end of your query to find all matches. In Zed, regex searches are global by default. +- **Case sensitivity**: Vim uses `/i` to indicate a case-insensitive search. In Zed you can either write `(?i)` at the start of the pattern or toggle case-sensitivity with the shortcut {#kb search::ToggleCaseSensitive}. -To help with the transition, the command palette will fix parentheses and replace groups for you when you run `:%s//`. So `%s:/\(a\)(b)/\1/` will be converted into a search for "(a)\(b\)" and a replacement of "$1". +> **Note**: To help with the transition, the command palette will fix parentheses and replace groups for you when you write a Vim-style substitute command, `:%s//`. So, Zed will convert `%s:/\(a\)(b)/\1/` into a search for "(a)\(b\)" and a replacement of "$1". -For the full syntax supported by Zed's regex engine see the [regex crate documentation](https://docs.rs/regex/latest/regex/#syntax). +For the full syntax supported by Zed's regex engine [see the regex crate documentation](https://docs.rs/regex/latest/regex/#syntax). diff --git a/docs/theme/css/chrome.css b/docs/theme/css/chrome.css index 7afeabc704ba36..11f16848d70eb4 100644 --- a/docs/theme/css/chrome.css +++ b/docs/theme/css/chrome.css @@ -3,7 +3,7 @@ @import "variables.css"; html { - background-color: rgb(246, 245, 240); + background-color: var(--bg); scrollbar-color: var(--scrollbar) var(--bg); } #searchresults a, @@ -58,7 +58,7 @@ a > .hljs { height: var(--menu-bar-height); } #menu-bar.bordered { - border-block-end-color: var(--table-border-color); + border-block-end-color: var(--divider); } #menu-bar i, #menu-bar .icon-button { @@ -73,7 +73,7 @@ a > .hljs { transition: color 0.5s; } #menu-bar .icon-button:hover { - background-color: hsl(219, 93%, 42%, 0.15); + background-color: var(--icon-btn-bg-hover); } @media only screen and (max-width: 420px) { @@ -116,6 +116,7 @@ a > .hljs { align-items: center; flex: 1; overflow: hidden; + filter: var(--logo-brightness); } .js .menu-title { cursor: pointer; @@ -249,9 +250,10 @@ a:hover > .hljs { } pre { - background-color: white; - border: 1px rgba(8, 76, 207, 0.3) solid; - box-shadow: rgba(8, 76, 207, 0.07) 4px 4px 0px 0px; + background-color: var(--pre-bg); + border: 1px solid; + border-color: var(--pre-border); + box-shadow: var(--pre-shadow) 4px 4px 0px 0px; position: relative; } pre > .hljs { @@ -445,7 +447,8 @@ ul#searchresults span.teaser em { overscroll-behavior-y: contain; background-color: var(--sidebar-bg); color: var(--sidebar-fg); - border-right: 1px solid hsl(219, 93%, 42%, 0.15); + border-right: 1px solid; + border-color: var(--divider); } [dir="rtl"] .sidebar { left: unset; @@ -606,7 +609,7 @@ ul#searchresults span.teaser em { margin: 5px 0px; } .chapter .spacer { - background-color: var(--sidebar-spacer); + background-color: var(--divider); } @media (-moz-touch-enabled: 1), (pointer: 
coarse) { @@ -628,11 +631,11 @@ ul#searchresults span.teaser em { .theme-popup { position: absolute; - left: 10px; - top: var(--menu-bar-height); + left: 32px; + top: calc(var(--menu-bar-height) - 12px); z-index: 1000; border-radius: 4px; - font-size: 0.7em; + font-size: 1.4rem; color: var(--fg); background: var(--theme-popup-bg); border: 1px solid var(--theme-popup-border); @@ -654,7 +657,7 @@ ul#searchresults span.teaser em { width: 100%; border: 0; margin: 0; - padding: 2px 20px; + padding: 2px 24px; line-height: 25px; white-space: nowrap; text-align: start; @@ -662,32 +665,36 @@ ul#searchresults span.teaser em { color: inherit; background: inherit; font-size: inherit; + font-family: inherit; } .theme-popup .theme:hover { background-color: var(--theme-hover); } .theme-selected::before { + font-family: Arial, Helvetica, sans-serif; + text-align: center; display: inline-block; content: "✓"; - margin-inline-start: -14px; - width: 14px; + margin-inline-start: -20px; + width: 20px; } .download-button { - background: hsl(220, 60%, 95%); - color: hsl(220, 60%, 30%); + background: var(--download-btn-bg); + color: var(--download-btn-color); padding: 4px 8px; - border: 1px solid hsla(220, 60%, 40%, 0.2); + border: 1px solid; + border-color: var(--download-btn-border); font-size: 1.4rem; border-radius: 4px; - box-shadow: hsla(220, 40%, 60%, 0.1) 0px -2px 0px 0px inset; + box-shadow: var(--download-btn-shadow) 0px -2px 0px 0px inset; transition: 100ms; transition-property: box-shadow, border-color, background-color; } .download-button:hover { - background: hsl(220, 60%, 93%); - border-color: hsla(220, 60%, 50%, 0.2); + background: var(--download-btn-bg); + border-color: var(--download-btn-border-hover); box-shadow: none; } diff --git a/docs/theme/css/general.css b/docs/theme/css/general.css index 9a20751f211e74..d1b8e9b92653e7 100644 --- a/docs/theme/css/general.css +++ b/docs/theme/css/general.css @@ -174,14 +174,15 @@ h6:target::before { } .content a { text-decoration: underline; - text-decoration-color: hsl(219, 93%, 42%, 0.2); + text-decoration-color: var(--link-line-decoration); } .content a:hover { - text-decoration-color: hsl(219, 93%, 42%, 0.5); + text-decoration-color: var(--link-line-decoration-hover); } .content img, .content video { max-width: 100%; + background-color: var(--media-bg); border: 1px solid; border-color: var(--border); border-radius: 8px; @@ -219,7 +220,7 @@ table thead td { } table thead th { padding: 6px 12px; - color: #000; + color: var(--full-contrast); text-align: left; border: 1px var(--table-border-color) solid; } @@ -235,7 +236,7 @@ blockquote { margin: auto; margin-top: 1rem; padding: 1rem 1.25rem; - color: #000; + color: var(--full-contrast); background-color: var(--quote-bg); border: 1px solid var(--quote-border); } @@ -268,7 +269,7 @@ blockquote .warning:before { .warning { margin: auto; padding: 1rem 1.25rem; - color: #000; + color: var(--full-contrast); background-color: var(--warning-bg); border: 1px solid var(--warning-border); } @@ -315,7 +316,7 @@ kbd { font-size: 1.4rem; margin: 0.5em 0; border-bottom: 1px solid; - border-color: var(--border-light); + border-color: var(--divider); } .footnote-definition p { display: inline; @@ -355,7 +356,7 @@ kbd { font-style: italic; } -code.hljs { - color: hsl(221, 13%, 10%) !important; - background-color: hsla(221, 93%, 42%, 0.1); +code:not(pre code).hljs { + color: var(--code-text) !important; + background-color: var(--code-bg) !important; } diff --git a/docs/theme/css/variables.css b/docs/theme/css/variables.css 
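The theme changes above all follow one pattern: hard-coded colors are replaced with `var(--token)` references, and the tokens themselves are defined once for the light palette and overridden under a `.dark` class (which the `darkModeToggle` script toggles on the root element further below). A minimal sketch of that pattern, using invented token names rather than the real ones from `variables.css`:

```css
/* Illustrative only: these token and class names are made up for the sketch. */
:root {
  --panel-bg: #ffffff;
  --panel-fg: #24292e;
}

.dark {
  /* Same tokens, darker values; components referencing var(--...) need no changes. */
  --panel-bg: #0d1117;
  --panel-fg: #c9d1d9;
}

.panel {
  background-color: var(--panel-bg);
  color: var(--panel-fg);
}
```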
index 481cb5a6c183c2..55ae4a427da269 100644 --- a/docs/theme/css/variables.css +++ b/docs/theme/css/variables.css @@ -1,6 +1,10 @@ /* Globals */ :root { + --color-scheme: light; + + --logo-brightness: brightness(1); + --sidebar-width: 300px; --sidebar-resize-indicator-width: 0px; --sidebar-resize-indicator-space: 2px; @@ -22,20 +26,34 @@ --border-light: hsl(220, 13%, 90%); --border-hover: hsl(220, 13%, 70%); + --media-bg: hsl(50, 25%, 92%); + --sidebar-fg: hsl(0, 0%, 0%); --sidebar-non-existant: #aaaaaa; - --sidebar-active: hsl(219, 93%, 42%); - --sidebar-active-bg: hsl(219, 93%, 42%, 0.1); - --sidebar-spacer: #f4f4f4; + --sidebar-active: hsl(220, 93%, 42%); + --sidebar-active-bg: hsl(220, 93%, 42%, 0.1); + --divider: hsl(220, 93%, 42%, 0.15); --scrollbar: #8f8f8f; --icons: #747474; --icons-hover: #000000; + --icon-btn-bg-hover: hsl(220, 93%, 42%, 0.15); - --links: rgb(8, 76, 207); + --links: hsl(220, 92%, 42%); + --link-line-decoration: hsl(220, 93%, 42%, 0.2); + --link-line-decoration-hover: hsl(220, 93%, 42%, 0.5); + + --full-contrast: #000; --inline-code-color: #301900; + --code-text: hsl(220, 13%, 10%); + --code-bg: hsl(220, 93%, 42%, 0.1); + --keybinding-bg: hsl(0, 0%, 94%); + + --pre-bg: #fff; + --pre-border: hsla(220, 93%, 42%, 0.3); + --pre-shadow: hsla(220, 93%, 42%, 0.07); --theme-popup-bg: #fafafa; --theme-popup-border: #cccccc; @@ -48,9 +66,9 @@ --warning-bg: hsl(42, 100%, 60%, 0.1); --warning-icon: hsl(42, 100%, 30%); - --table-header-bg: hsl(219, 50%, 90%, 0.4); - --table-border-color: hsl(219, 93%, 42%, 0.15); - --table-alternate-bg: hsl(219, 10%, 90%, 0.4); + --table-header-bg: hsl(220, 50%, 90%, 0.4); + --table-border-color: hsl(220, 93%, 42%, 0.15); + --table-alternate-bg: hsl(220, 10%, 90%, 0.4); --searchbar-border-color: #aaa; --searchbar-bg: #fafafa; @@ -61,5 +79,84 @@ --searchresults-li-bg: #e4f2fe; --search-mark-bg: #a2cff5; - --color-scheme: light; + --download-btn-bg: hsl(220, 60%, 95%); + --download-btn-bg-hover: hsl(220, 60%, 93%); + --download-btn-color: hsl(220, 60%, 30%); + --download-btn-border: hsla(220, 60%, 40%, 0.2); + --download-btn-border-hover: hsla(220, 60%, 50%, 0.2); + --download-btn-shadow: hsla(220, 40%, 60%, 0.1); +} + +.dark { + --color-scheme: dark; + + --logo-brightness: brightness(2); + + --bg: hsl(220, 13%, 10%); + --fg: hsl(220, 14%, 70%); + --title-color: hsl(220, 92%, 80%); + + --border: hsl(220, 13%, 20%); + --border-light: hsl(220, 13%, 90%); + --border-hover: hsl(220, 13%, 40%); + + --media-bg: hsl(220, 13%, 8%); + + --sidebar-bg: hsl(220, 13%, 10%); + --sidebar-fg: hsl(220, 14%, 71%); + --sidebar-non-existant: #505254; + --sidebar-active: hsl(220, 92%, 75%); + --sidebar-active-bg: hsl(220, 93%, 42%, 0.25); + + --divider: hsl(220, 13%, 20%); + --scrollbar: hsl(220, 13%, 30%); + + --icons: hsl(220, 14%, 71%); + --icons-hover: hsl(220, 14%, 90%); + --icon-btn-bg-hover: hsl(220, 93%, 42%, 0.4); + + --links: hsl(220, 93%, 75%); + --link-line-decoration: hsl(220, 92%, 80%, 0.4); + --link-line-decoration-hover: hsl(220, 92%, 80%, 0.8); + --full-contrast: #fff; + + --inline-code-color: hsl(40, 100%, 80%); + --code-text: hsl(220, 13%, 95%); + --code-bg: hsl(220, 93%, 50%, 0.2); + --keybinding-bg: hsl(0, 0%, 12%); + + --pre-bg: hsl(220, 13%, 5%); + --pre-border: hsla(220, 93%, 70%, 0.3); + --pre-shadow: hsla(220, 93%, 70%, 0.1); + + --theme-popup-bg: hsl(220, 13%, 15%); + --theme-popup-border: hsl(220, 13%, 20%); + --theme-hover: hsl(220, 13%, 25%); + + --quote-bg: hsl(220, 13%, 25%, 0.4); + --quote-border: hsl(220, 13%, 32%, 0.5); + + 
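/* Dark-theme counterparts for the table, warning, search, and download-button colors defined in the light palette above. */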
--table-border-color: hsl(220, 13%, 30%, 0.5); + --table-header-bg: hsl(220, 13%, 25%, 0.5); + --table-alternate-bg: hsl(220, 13%, 20%, 0.4); + + --warning-border: hsl(25, 100%, 85%, 0.2); + --warning-bg: hsl(42, 100%, 40%, 0.1); + --warning-icon: hsl(42, 100%, 80%); + + --searchbar-border-color: hsl(220, 13%, 30%); + --searchbar-bg: hsl(220, 13%, 22%, 0.5); + --searchbar-fg: hsl(220, 14%, 71%); + --searchbar-shadow-color: hsl(220, 13%, 15%); + --searchresults-header-fg: hsl(220, 14%, 60%); + --searchresults-border-color: hsl(220, 13%, 30%); + --searchresults-li-bg: hsl(220, 13%, 25%); + --search-mark-bg: hsl(220, 93%, 60%); + + --download-btn-bg: hsl(220, 90%, 90%, 0.1); + --download-btn-bg-hover: hsl(220, 90%, 50%, 0.2); + --download-btn-color: hsl(220, 90%, 95%); + --download-btn-border: hsla(220, 90%, 80%, 0.2); + --download-btn-border-hover: hsla(220, 90%, 80%, 0.4); + --download-btn-shadow: hsla(220, 50%, 60%, 0.15); } diff --git a/docs/theme/highlight.css b/docs/theme/highlight.css new file mode 100644 index 00000000000000..9bd80f351612a8 --- /dev/null +++ b/docs/theme/highlight.css @@ -0,0 +1,252 @@ +/*! + Theme: GitHub + Description: Light theme as seen on github.com + Author: github.com + Maintainer: @Hirse + Updated: 2021-05-15 + + Outdated base version: https://github.com/primer/github-syntax-light + Current colors taken from GitHub's CSS +*/ + +.hljs { + color: #24292e; + background: #ffffff; + overflow-x: auto; +} + +.hljs-doctag, +.hljs-keyword, +.hljs-meta .hljs-keyword, +.hljs-template-tag, +.hljs-template-variable, +.hljs-type, +.hljs-variable.language_ { + /* prettylights-syntax-keyword */ + color: #d73a49; +} + +.hljs-title, +.hljs-title.class_, +.hljs-title.class_.inherited__, +.hljs-title.function_ { + /* prettylights-syntax-entity */ + color: #6f42c1; +} + +.hljs-attr, +.hljs-attribute, +.hljs-literal, +.hljs-meta, +.hljs-number, +.hljs-operator, +.hljs-variable, +.hljs-selector-attr, +.hljs-selector-class, +.hljs-selector-id { + /* prettylights-syntax-constant */ + color: #005cc5; +} + +.hljs-regexp, +.hljs-string, +.hljs-meta .hljs-string { + /* prettylights-syntax-string */ + color: #032f62; +} + +.hljs-built_in, +.hljs-symbol { + /* prettylights-syntax-variable */ + color: #e36209; +} + +.hljs-comment, +.hljs-code, +.hljs-formula { + /* prettylights-syntax-comment */ + color: #6a737d; +} + +.hljs-name, +.hljs-quote, +.hljs-selector-tag, +.hljs-selector-pseudo { + /* prettylights-syntax-entity-tag */ + color: #22863a; +} + +.hljs-subst { + /* prettylights-syntax-storage-modifier-import */ + color: #24292e; +} + +.hljs-section { + /* prettylights-syntax-markup-heading */ + color: #005cc5; + font-weight: bold; +} + +.hljs-bullet { + /* prettylights-syntax-markup-list */ + color: #735c0f; +} + +.hljs-emphasis { + /* prettylights-syntax-markup-italic */ + color: #24292e; + font-style: italic; +} + +.hljs-strong { + /* prettylights-syntax-markup-bold */ + color: #24292e; + font-weight: bold; +} + +.hljs-addition { + /* prettylights-syntax-markup-inserted */ + color: #22863a; + background-color: #f0fff4; +} + +.hljs-deletion { + /* prettylights-syntax-markup-deleted */ + color: #b31d28; + background-color: #ffeef0; +} + +.hljs-char.escape_, +.hljs-link, +.hljs-params, +.hljs-property, +.hljs-punctuation, +.hljs-tag { + /* purposely ignored */ +} + +/*! 
+ Theme: GitHub Dark + Description: Dark theme as seen on github.com + Author: github.com + Maintainer: @Hirse + Updated: 2021-05-15 + + Outdated base version: https://github.com/primer/github-syntax-dark + Current colors taken from GitHub's CSS +*/ + +.dark .hljs { + color: #c9d1d9; + background: #0d1117; +} + +.dark .hljs-doctag, +.dark .hljs-keyword, +.dark .hljs-meta .hljs-keyword, +.dark .hljs-template-tag, +.dark .hljs-template-variable, +.dark .hljs-type, +.dark .hljs-variable.language_ { + /* prettylights-syntax-keyword */ + color: #ff7b72; +} + +.dark .hljs-title, +.dark .hljs-title.class_, +.dark .hljs-title.class_.inherited__, +.dark .hljs-title.function_ { + /* prettylights-syntax-entity */ + color: #d2a8ff; +} + +.dark .hljs-attr, +.dark .hljs-attribute, +.dark .hljs-literal, +.dark .hljs-meta, +.dark .hljs-number, +.dark .hljs-operator, +.dark .hljs-variable, +.dark .hljs-selector-attr, +.dark .hljs-selector-class, +.dark .hljs-selector-id { + /* prettylights-syntax-constant */ + color: #79c0ff; +} + +.dark .hljs-regexp, +.dark .hljs-string, +.dark .hljs-meta .hljs-string { + /* prettylights-syntax-string */ + color: #a5d6ff; +} + +.dark .hljs-built_in, +.dark .hljs-symbol { + /* prettylights-syntax-variable */ + color: #ffa657; +} + +.dark .hljs-comment, +.dark .hljs-code, +.dark .hljs-formula { + /* prettylights-syntax-comment */ + color: #8b949e; +} + +.dark .hljs-name, +.dark .hljs-quote, +.dark .hljs-selector-tag, +.dark .hljs-selector-pseudo { + /* prettylights-syntax-entity-tag */ + color: #7ee787; +} + +.dark .hljs-subst { + /* prettylights-syntax-storage-modifier-import */ + color: #c9d1d9; +} + +.dark .hljs-section { + /* prettylights-syntax-markup-heading */ + color: #1f6feb; + font-weight: bold; +} + +.dark .hljs-bullet { + /* prettylights-syntax-markup-list */ + color: #f2cc60; +} + +.dark .hljs-emphasis { + /* prettylights-syntax-markup-italic */ + color: #c9d1d9; + font-style: italic; +} + +.dark .hljs-strong { + /* prettylights-syntax-markup-bold */ + color: #c9d1d9; + font-weight: bold; +} + +.dark .hljs-addition { + /* prettylights-syntax-markup-inserted */ + color: #aff5b4; + background-color: #033a16; +} + +.dark .hljs-deletion { + /* prettylights-syntax-markup-deleted */ + color: #ffdcd7; + background-color: #67060c; +} + +.dark .hljs-char.escape_, +.dark .hljs-link, +.dark .hljs-params, +.dark .hljs-property, +.dark .hljs-punctuation, +.dark .hljs-tag { + /* purposely ignored */ +} diff --git a/docs/theme/index.hbs b/docs/theme/index.hbs index 8976b54bd96014..c4154b46d33b77 100644 --- a/docs/theme/index.hbs +++ b/docs/theme/index.hbs @@ -1,5 +1,5 @@ - + @@ -56,13 +56,15 @@ var default_theme = window.matchMedia("(prefers-color-scheme: dark)").matches ? 
"{{ preferred_dark_theme }}" : "{{ default_theme }}"; - + + + diff --git a/docs/theme/page-toc.css b/docs/theme/page-toc.css index dacd61a09b685a..af9b2fbbe1202c 100644 --- a/docs/theme/page-toc.css +++ b/docs/theme/page-toc.css @@ -74,6 +74,6 @@ margin-bottom: 12px; padding-left: 12px; font-size: 1.4rem; - color: #000; + color: var(--full-contrast); } } diff --git a/docs/theme/plugins.css b/docs/theme/plugins.css index 9deee5d5baf9ff..9d5d09fe736a96 100644 --- a/docs/theme/plugins.css +++ b/docs/theme/plugins.css @@ -1,5 +1,5 @@ kbd.keybinding { - background-color: #f0f0f0; + background-color: var(--keybinding-bg); padding: 2px 4px; border-radius: 3px; font-family: monospace; diff --git a/docs/theme/plugins.js b/docs/theme/plugins.js index eee842f15a9f44..76a295353f7abc 100644 --- a/docs/theme/plugins.js +++ b/docs/theme/plugins.js @@ -48,3 +48,65 @@ console.log("Operating System:", os); // Start the process from the body walkDOM(document.body); })(); + +function darkModeToggle() { + var html = document.documentElement; + var themeToggleButton = document.getElementById("theme-toggle"); + var themePopup = document.getElementById("theme-list"); + var themePopupButtons = themePopup.querySelectorAll("button"); + + function setTheme(theme) { + html.setAttribute("data-theme", theme); + html.setAttribute("data-color-scheme", theme); + html.className = theme; + localStorage.setItem("mdbook-theme", theme); + + // Force a repaint to ensure the changes take effect in the client immediately + document.body.style.display = "none"; + document.body.offsetHeight; + document.body.style.display = ""; + } + + themeToggleButton.addEventListener("click", function (event) { + event.preventDefault(); + themePopup.style.display = + themePopup.style.display === "block" ? "none" : "block"; + }); + + themePopupButtons.forEach(function (button) { + button.addEventListener("click", function () { + setTheme(this.id); + themePopup.style.display = "none"; + }); + }); + + document.addEventListener("click", function (event) { + if ( + !themePopup.contains(event.target) && + !themeToggleButton.contains(event.target) + ) { + themePopup.style.display = "none"; + } + }); + + // Set initial theme + var currentTheme = localStorage.getItem("mdbook-theme"); + if (currentTheme) { + setTheme(currentTheme); + } else { + // If no theme is set, use the system's preference + var systemPreference = window.matchMedia("(prefers-color-scheme: dark)") + .matches + ? "dark" + : "light"; + setTheme(systemPreference); + } + + // Listen for system's preference changes + const darkModeMediaQuery = window.matchMedia("(prefers-color-scheme: dark)"); + darkModeMediaQuery.addEventListener("change", function (e) { + if (!localStorage.getItem("mdbook-theme")) { + setTheme(e.matches ? "dark" : "light"); + } + }); +} diff --git a/extensions/EXTRACTION.md b/extensions/EXTRACTION.md new file mode 100644 index 00000000000000..fe112e5b41d0fa --- /dev/null +++ b/extensions/EXTRACTION.md @@ -0,0 +1,88 @@ +# Extracting an extension to dedicated repo + +These are some notes of how to extract an extension from the main zed repository and generate a new repository which preserves the history as best as possible. In the this example we will be extracting the `ruby` extension, substitute as appropriate. + +## Pre-requisites + +Install [git-filter-repo](https://github.com/newren/git-filter-repo/blob/main/INSTALL.md): + +``` +brew install git-filter-repo +``` + +## Process + +1. Create an expressions.txt file somewhere (e.g. 
`~/projects/expressions.txt`) + +``` +ruby: ==> +extension: ==> +chore: ==> +zed_extension_api: ==> +regex:(?zed-industries/zed\1 +``` + +This file takes the form of `patern==>replacement`, where the replacement is optional. +Note whitespace matters so `ruby: ==>` is removing the `ruby:` prefix from a commit messages and adding a space after `==> ` means the replacement begins with a space. Regex capture groups are numbered `\1`, `\2`, etc. + +See: [Git Filter Repo Docs](https://htmlpreview.github.io/?https://github.com/newren/git-filter-repo/blob/docs/html/git-filter-repo.html) for more. + +2. Create a clean clone the zed repository, delete tags, delete branches and do the work. + +> **Note** +> If you get `zsh: command not found: #` errors, run: +> `setopt interactive_comments && echo "setopt interactive_comments" >> ~/.zshrc` + +```sh +rm -rf zed3 +git clone --single-branch --no-tags git@github.com:zed-industries/zed.git zed3 +cd zed3 + +# This removes the LICENSE symlink +git filter-repo --invert-paths --path extensions/ruby/LICENSE-APACHE + +git filter-repo \ + --use-mailmap \ + --subdirectory-filter extensions/ruby/ \ + --path LICENSE-APACHE \ + --replace-message ~/projects/expressions.txt +``` + +3. Review the commits. + +This is your last chance to make any modifications. +If you don't fix it now, it'll be wrong forever. + +For example, a previous commit message was `php/ruby: bump version to 0.0.5` +which was replaced with `php/bump version to 0.0.5` +so I added a new line to expressions.txt with `php/==>` +and next run it became `bump version to 0.0.5`. + +4. [Optional] Generate tags + +You can always add tags later, but it's a nice touch. + +Show you all commits that mention a version number: + +```sh +git log --grep="(\d+\.\d+\.\d+\.)" --perl-regexp --oneline --reverse +``` + +Then just: +``` +git tag v0.0.2 abcd1234 +git tag v0.0.3 deadbeef +``` + +Usually the initial extraction didn't mention a version number so you can just do that one manually. + +4. Push to the new repo + +Create a new empty repo on github under the [zed-extensions](https://github.com/zed-extensions) organization. + +``` +git remote add origin git@github.com:zed-extensions/ruby +git push origin main --tags +``` + +5. [Optional] diff --git a/extensions/README.md b/extensions/README.md new file mode 100644 index 00000000000000..c677e0b909c0a3 --- /dev/null +++ b/extensions/README.md @@ -0,0 +1,63 @@ +# Zed Extensions + +This directory contains extensions for Zed that are largely maintained by the Zed team. They currently live in the Zed repository for ease of maintenance. + +If you are looking for the Zed extension registry, see the [`zed-industries/extensions`](https://github.com/zed-industries/extensions) repo. + +## Structure + +Currently, Zed includes support for a number of languages without requiring installing an extension. Those languages can be found under [`crates/languages/src`](https://github.com/zed-industries/zed/tree/main/crates/languages/src). + +Support for all other languages is done via extensions. This directory ([extensions/](https://github.com/zed-industries/zed/tree/main/extensions/)) contains a number of officially maintained extensions. 
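For orientation, the extensions in this directory follow roughly the layout sketched below (file names vary per extension and not every extension has every file; the tree is illustrative rather than exhaustive):

```
extensions/<extension-id>/
├── extension.toml            # id, name, version, authors, language servers, grammars
├── Cargo.toml                # the extension's Rust crate, built as a cdylib
├── src/<extension-id>.rs     # implementation against zed_extension_api
└── languages/<language>/
    ├── config.toml           # language configuration (comments, brackets, ...)
    ├── highlights.scm        # tree-sitter highlight queries
    └── outline.scm           # additional tree-sitter queries (outline, indents, ...)
```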
These extensions use the same [zed_extension_api](https://docs.rs/zed_extension_api/latest/zed_extension_api/) available to all [Zed Extensions](https://zed.dev/extensions) for providing [language servers](https://zed.dev/docs/extensions/languages#language-servers), [tree-sitter grammars](https://zed.dev/docs/extensions/languages#grammar) and [tree-sitter queries](https://zed.dev/docs/extensions/languages#tree-sitter-queries). + +## Dev Extensions + +See the docs for [Developing an Extension Locally](https://zed.dev/docs/extensions/developing-extensions#developing-an-extension-locally) for how to work with one of these extensions. + +## Updating + +> [!NOTE] +> This update process is usually handled by Zed staff. +> Community contributors should just submit a PR (step 1) and we'll take it from there. + +The process for updating an extension in this directory has three parts. + +1. Create a PR with your changes. (Merge it) +2. Bump the extension version in: + + - extensions/{language_name}/extension.toml + - extensions/{language_name}/Cargo.toml + - Cargo.lock + + You can do this manually, or with a script: + + ```sh + # Output the current version for a given language + ./script/language-extension-version + + # Update the version in `extension.toml` and `Cargo.toml` and trigger a `cargo check` + ./script/language-extension-version + ``` + + Commit your changes to a branch, push a PR and merge it. + +3. Open a PR to [`zed-industries/extensions`](https://github.com/zed-industries/extensions) repo that updates the extension in question + +Edit [`extensions.toml`](https://github.com/zed-industries/extensions/blob/main/extensions.toml) in the extensions repo to reflect the new version you set above and update the submodule latest Zed commit. + +```sh +# Go into your clone of the extensions repo +cd ../extensions + +# Update +git checkout main +git pull +just init-submodule extensions/zed + +# Update the Zed submodule +cd extensions/zed +git checkout main +git pull +cd - +git add extensions.toml extensions/zed +``` diff --git a/extensions/astro/Cargo.toml b/extensions/astro/Cargo.toml index 11de7b22c57f13..755896125c8010 100644 --- a/extensions/astro/Cargo.toml +++ b/extensions/astro/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_astro" -version = "0.1.0" +version = "0.1.1" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/astro/extension.toml b/extensions/astro/extension.toml index 44f0036bd6ebe1..03183e51e83477 100644 --- a/extensions/astro/extension.toml +++ b/extensions/astro/extension.toml @@ -1,7 +1,7 @@ id = "astro" name = "Astro" description = "Astro support." 
-version = "0.1.0" +version = "0.1.1" schema_version = 1 authors = ["Alvaro Gaona ", "0xk1f0 "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/astro/languages/astro/highlights.scm b/extensions/astro/languages/astro/highlights.scm index 491e8cc337c4ca..a565e22b6e2275 100644 --- a/extensions/astro/languages/astro/highlights.scm +++ b/extensions/astro/languages/astro/highlights.scm @@ -1,6 +1,6 @@ (tag_name) @tag (erroneous_end_tag_name) @keyword -(doctype) @constant +(doctype) @tag.doctype (attribute_name) @property (attribute_value) @string (comment) @comment diff --git a/extensions/csharp/languages/csharp/config.toml b/extensions/csharp/languages/csharp/config.toml index fd0e13b6c2d652..8f07b45e3bcff2 100644 --- a/extensions/csharp/languages/csharp/config.toml +++ b/extensions/csharp/languages/csharp/config.toml @@ -2,7 +2,7 @@ name = "CSharp" code_fence_block_name = "csharp" grammar = "c_sharp" path_suffixes = ["cs"] -line_comments = ["// "] +line_comments = ["// ", "/// "] autoclose_before = ";:.,=}])>" brackets = [ { start = "{", end = "}", close = true, newline = true }, diff --git a/extensions/dart/Cargo.toml b/extensions/dart/Cargo.toml index ecc2328083a307..3d79e104c105c6 100644 --- a/extensions/dart/Cargo.toml +++ b/extensions/dart/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_dart" -version = "0.0.3" +version = "0.1.1" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/dart/extension.toml b/extensions/dart/extension.toml index 6d87957fc6e58e..5ea8c37c2f917f 100644 --- a/extensions/dart/extension.toml +++ b/extensions/dart/extension.toml @@ -1,9 +1,9 @@ id = "dart" name = "Dart" description = "Dart support." -version = "0.0.3" +version = "0.1.1" schema_version = 1 -authors = ["Abdullah Alsigar ", "Flo "] +authors = ["Abdullah Alsigar ", "Flo ", "ybbond "] repository = "https://github.com/zed-industries/zed" [language_servers.dart] diff --git a/extensions/dart/languages/dart/indents.scm b/extensions/dart/languages/dart/indents.scm index 4d6f8c1cb75b69..112b414aa45f27 100644 --- a/extensions/dart/languages/dart/indents.scm +++ b/extensions/dart/languages/dart/indents.scm @@ -1,18 +1,3 @@ -(class_definition - "class" @context - name: (_) @name) @item - -(function_signature - name: (_) @name) @item - -(getter_signature - "get" @context - name: (_) @name) @item - -(setter_signature - "set" @context - name: (_) @name) @item - -(enum_declaration - "enum" @context - name: (_) @name) @item +(_ "[" "]" @end) @indent +(_ "{" "}" @end) @indent +(_ "(" ")" @end) @indent diff --git a/extensions/dart/src/dart.rs b/extensions/dart/src/dart.rs index e541846256fcbd..38a2cf25a690f5 100644 --- a/extensions/dart/src/dart.rs +++ b/extensions/dart/src/dart.rs @@ -3,8 +3,47 @@ use zed::settings::LspSettings; use zed::{CodeLabel, CodeLabelSpan}; use zed_extension_api::{self as zed, serde_json, Result}; +struct DartBinary { + pub path: String, + pub args: Option>, +} + struct DartExtension; +impl DartExtension { + fn language_server_binary( + &mut self, + _language_server_id: &zed::LanguageServerId, + worktree: &zed::Worktree, + ) -> Result { + let binary_settings = LspSettings::for_worktree("dart", worktree) + .ok() + .and_then(|lsp_settings| lsp_settings.binary); + let binary_args = binary_settings + .as_ref() + .and_then(|binary_settings| binary_settings.arguments.clone()); + + if let Some(path) = binary_settings.and_then(|binary_settings| binary_settings.path) { + return Ok(DartBinary { + path, + args: binary_args, + }); + } + + if let 
Some(path) = worktree.which("dart") { + return Ok(DartBinary { + path, + args: binary_args, + }); + } + + Err( + "dart must be installed from dart.dev/get-dart or pointed to by the LSP binary settings" + .to_string(), + ) + } +} + impl zed::Extension for DartExtension { fn new() -> Self { Self @@ -12,16 +51,16 @@ impl zed::Extension for DartExtension { fn language_server_command( &mut self, - _language_server_id: &zed::LanguageServerId, + language_server_id: &zed::LanguageServerId, worktree: &zed::Worktree, ) -> Result { - let path = worktree - .which("dart") - .ok_or_else(|| "dart must be installed from dart.dev/get-dart".to_string())?; + let dart_binary = self.language_server_binary(language_server_id, worktree)?; Ok(zed::Command { - command: path, - args: vec!["language-server".to_string(), "--protocol=lsp".to_string()], + command: dart_binary.path, + args: dart_binary.args.unwrap_or_else(|| { + vec!["language-server".to_string(), "--protocol=lsp".to_string()] + }), env: Default::default(), }) } diff --git a/extensions/elixir/Cargo.toml b/extensions/elixir/Cargo.toml index 6e132ba6aaef0c..139d21f1c5e895 100644 --- a/extensions/elixir/Cargo.toml +++ b/extensions/elixir/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_elixir" -version = "0.0.9" +version = "0.1.1" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/elixir/extension.toml b/extensions/elixir/extension.toml index a21e01c4700998..ba8a1f66872659 100644 --- a/extensions/elixir/extension.toml +++ b/extensions/elixir/extension.toml @@ -1,7 +1,7 @@ id = "elixir" name = "Elixir" description = "Elixir support." -version = "0.0.9" +version = "0.1.1" schema_version = 1 authors = ["Marshall Bowers "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/elixir/languages/elixir/outline.scm b/extensions/elixir/languages/elixir/outline.scm index 12a1a25e5f12d9..2e864136fc56a7 100644 --- a/extensions/elixir/languages/elixir/outline.scm +++ b/extensions/elixir/languages/elixir/outline.scm @@ -3,6 +3,16 @@ (arguments (alias) @name) (#match? @context "^(defmodule|defprotocol)$")) @item +(call + target: (identifier) @context + (arguments (_) @name)? + (#match? @context "^(setup|setup_all)$")) @item + +(call + target: (identifier) @context + (arguments (string) @name) + (#match? @context "^(describe|test)$")) @item + (unary_operator operator: "@" @name operand: (call diff --git a/extensions/elixir/languages/heex/highlights.scm b/extensions/elixir/languages/heex/highlights.scm index 5252b71facd533..9662c955242be3 100644 --- a/extensions/elixir/languages/heex/highlights.scm +++ b/extensions/elixir/languages/heex/highlights.scm @@ -27,7 +27,7 @@ "=" @operator ; HEEx inherits the DOCTYPE tag from HTML -(doctype) @constant +(doctype) @tag.doctype (comment) @comment diff --git a/extensions/erlang/Cargo.toml b/extensions/erlang/Cargo.toml index 1ac6b4b1b641fc..50673448963bf9 100644 --- a/extensions/erlang/Cargo.toml +++ b/extensions/erlang/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_erlang" -version = "0.0.1" +version = "0.1.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/erlang/extension.toml b/extensions/erlang/extension.toml index 863da32dfa71a3..23c7cf6d4a97d0 100644 --- a/extensions/erlang/extension.toml +++ b/extensions/erlang/extension.toml @@ -1,7 +1,7 @@ id = "erlang" name = "Erlang" description = "Erlang support." 
-version = "0.0.1" +version = "0.1.0" schema_version = 1 authors = ["Dairon M ", "Fabian Bergström "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/gleam/Cargo.toml b/extensions/gleam/Cargo.toml deleted file mode 100644 index 7008c6e146a46e..00000000000000 --- a/extensions/gleam/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -name = "zed_gleam" -version = "0.2.0" -edition = "2021" -publish = false -license = "Apache-2.0" - -[lints] -workspace = true - -[lib] -path = "src/gleam.rs" -crate-type = ["cdylib"] - -[dependencies] -html_to_markdown = "0.1.0" -zed_extension_api = "0.1.0" diff --git a/extensions/gleam/extension.toml b/extensions/gleam/extension.toml deleted file mode 100644 index 7cedbca5d463c8..00000000000000 --- a/extensions/gleam/extension.toml +++ /dev/null @@ -1,21 +0,0 @@ -id = "gleam" -name = "Gleam" -description = "Gleam support." -version = "0.2.0" -schema_version = 1 -authors = ["Marshall Bowers "] -repository = "https://github.com/zed-industries/zed" - -[language_servers.gleam] -name = "Gleam LSP" -language = "Gleam" - -[grammars.gleam] -repository = "https://github.com/gleam-lang/tree-sitter-gleam" -commit = "426e67087fd62be5f4533581b5916b2cf010fb5b" - -[slash_commands.gleam-project] -description = "Returns information about the current Gleam project." -requires_argument = false - -[indexed_docs_providers.gleam-hexdocs] diff --git a/extensions/gleam/languages/gleam/config.toml b/extensions/gleam/languages/gleam/config.toml deleted file mode 100644 index 51874945e2de6b..00000000000000 --- a/extensions/gleam/languages/gleam/config.toml +++ /dev/null @@ -1,12 +0,0 @@ -name = "Gleam" -grammar = "gleam" -path_suffixes = ["gleam"] -line_comments = ["// ", "/// "] -autoclose_before = ";:.,=}])>" -brackets = [ - { start = "{", end = "}", close = true, newline = true }, - { start = "[", end = "]", close = true, newline = true }, - { start = "(", end = ")", close = true, newline = true }, - { start = "\"", end = "\"", close = true, newline = false, not_in = ["string", "comment"] }, -] -tab_size = 2 diff --git a/extensions/gleam/languages/gleam/highlights.scm b/extensions/gleam/languages/gleam/highlights.scm deleted file mode 100644 index 4b85b88d0151a1..00000000000000 --- a/extensions/gleam/languages/gleam/highlights.scm +++ /dev/null @@ -1,130 +0,0 @@ -; Comments -(module_comment) @comment -(statement_comment) @comment -(comment) @comment - -; Constants -(constant - name: (identifier) @constant) - -; Variables -(identifier) @variable -(discard) @comment.unused - -; Modules -(module) @module -(import alias: (identifier) @module) -(remote_type_identifier - module: (identifier) @module) -(remote_constructor_name - module: (identifier) @module) -((field_access - record: (identifier) @module - field: (label) @function) - (#is-not? local)) - -; Functions -(unqualified_import (identifier) @function) -(unqualified_import "type" (type_identifier) @type) -(unqualified_import (type_identifier) @constructor) -(function - name: (identifier) @function) -(external_function - name: (identifier) @function) -(function_parameter - name: (identifier) @variable.parameter) -((function_call - function: (identifier) @function) - (#is-not? local)) -((binary_expression - operator: "|>" - right: (identifier) @function) - (#is-not? local)) - -; "Properties" -; Assumed to be intended to refer to a name for a field; something that comes -; before ":" or after "." -; e.g. 
record field names, tuple indices, names for named arguments, etc -(label) @property -(tuple_access - index: (integer) @property) - -; Attributes -(attribute - "@" @attribute - name: (identifier) @attribute) - -(attribute_value (identifier) @constant) - -; Type names -(remote_type_identifier) @type -(type_identifier) @type - -; Data constructors -(constructor_name) @constructor - -; Literals -(string) @string -((escape_sequence) @warning - ; Deprecated in v0.33.0-rc2: - (#eq? @warning "\\e")) -(escape_sequence) @string.escape -(bit_string_segment_option) @function.builtin -(integer) @number -(float) @number - -; Reserved identifiers -; TODO: when tree-sitter supports `#any-of?` in the Rust bindings, -; refactor this to use `#any-of?` rather than `#match?` -((identifier) @warning - (#match? @warning "^(auto|delegate|derive|else|implement|macro|test|echo)$")) - -; Keywords -[ - (visibility_modifier) ; "pub" - (opacity_modifier) ; "opaque" - "as" - "assert" - "case" - "const" - ; DEPRECATED: 'external' was removed in v0.30. - "external" - "fn" - "if" - "import" - "let" - "panic" - "todo" - "type" - "use" -] @keyword - -; Operators -(binary_expression - operator: _ @operator) -(boolean_negation "!" @operator) -(integer_negation "-" @operator) - -; Punctuation -[ - "(" - ")" - "[" - "]" - "{" - "}" - "<<" - ">>" -] @punctuation.bracket -[ - "." - "," - ;; Controversial -- maybe some are operators? - ":" - "#" - "=" - "->" - ".." - "-" - "<-" -] @punctuation.delimiter diff --git a/extensions/gleam/languages/gleam/indents.scm b/extensions/gleam/languages/gleam/indents.scm deleted file mode 100644 index 112b414aa45f27..00000000000000 --- a/extensions/gleam/languages/gleam/indents.scm +++ /dev/null @@ -1,3 +0,0 @@ -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent diff --git a/extensions/gleam/languages/gleam/outline.scm b/extensions/gleam/languages/gleam/outline.scm deleted file mode 100644 index f0a7b127985305..00000000000000 --- a/extensions/gleam/languages/gleam/outline.scm +++ /dev/null @@ -1,33 +0,0 @@ -(external_type - (visibility_modifier)? @context - "type" @context - (type_name) @name) @item - -(type_definition - (visibility_modifier)? @context - (opacity_modifier)? @context - "type" @context - (type_name) @name) @item - -(data_constructor - (constructor_name) @name) @item - -(data_constructor_argument - (label) @name) @item - -(type_alias - (visibility_modifier)? @context - "type" @context - (type_name) @name) @item - -(function - (visibility_modifier)? @context - "fn" @context - name: (_) @name) @item - -(constant - (visibility_modifier)? @context - "const" @context - name: (_) @name) @item - -(statement_comment) @annotation diff --git a/extensions/gleam/languages/gleam/runnables.scm b/extensions/gleam/languages/gleam/runnables.scm deleted file mode 100644 index b0b37b11a44a3f..00000000000000 --- a/extensions/gleam/languages/gleam/runnables.scm +++ /dev/null @@ -1,25 +0,0 @@ -; Functions with names ending in `_test`. -; This matches the standalone test style used by Startest and Gleeunit. -( - ( - (function name: (_) @run - (#match? @run ".*_test$")) - ) @gleam-test - (#set! tag gleam-test) -) - - -; `describe` API for Startest. -( - (function_call - function: (_) @name - (#any-of? @name "describe" "it") - arguments: (arguments - . - (argument - value: (string (quoted_content) @run) - ) - ) - ) - (#set! 
tag gleam-test) -) @gleam-test diff --git a/extensions/gleam/languages/gleam/tasks.json b/extensions/gleam/languages/gleam/tasks.json deleted file mode 100644 index af1e8ca7b67060..00000000000000 --- a/extensions/gleam/languages/gleam/tasks.json +++ /dev/null @@ -1,13 +0,0 @@ -[ - { - "label": "gleam test", - "command": "gleam", - "args": ["test"] - }, - { - "label": "gleam test $ZED_SYMBOL", - "command": "gleam", - "args": ["test", "--", "--test-name-filter=$ZED_SYMBOL"], - "tags": ["gleam-test"] - } -] diff --git a/extensions/gleam/packages.txt b/extensions/gleam/packages.txt deleted file mode 100644 index 04f0309e66476b..00000000000000 --- a/extensions/gleam/packages.txt +++ /dev/null @@ -1,581 +0,0 @@ -# The list of Gleam packages. -# Sourced from `https://packages.gleam.run/packages.sqlite`. -act -adglent -ag_html -aham -akaridb -alanttest1 -alpaca -amf0 -amnesiac -antigone -apollo -aragorn2 -arcana_signals -arctic -argamak -argus -argv -ask -asterix -atomic_array -aws4_request -bare_package1 -bare_package_one -bare_package_two -based -based_pg -based_sqlite -beecrypt -bidict -bigben -bigi -binary_search -birdie -birl -biscotto -bison -blah -blask -bliss -bravo -bungle -bytesize -cactus -cake -carpenter -catppuccin -cave3dplus -cgi -chatbot -check_maybe_div_by_zero -chip -chomp -chrobot -chromatic -classify -cleam -collatz -colored -colours -comet -commonmark -conllu -context_fp_gleam -conversation -cors_builder -cosepo -cosmos -counter -crabbucket_pgo -crabbucket_redis -crossbar -css_select -cymbal -dahlia -dbots -decepticon -decipher -decode -dedent -defangle -defer_g -delay -dew -dig -discord_gleam -domu -dot_env -dotenv_gleam -dove -ecoji -edit_distance -efetch -email -embeds -emel -envoy -esgleam -espresso -espresso_pgo_wrapper -eval -event_hub -eventsourcing -eventsourcing_postgres -eventsourcing_sqlite -exception -exercism_test_runner -facet -facquest -falala -falcon -feather -fetch_event -ffmpeg -fibo -file_streams -filepath -filespy -finch_gleam -first_gleam_publish_package -flash -fluoresce -fmglee -fmt -for_the_crows -form_coder -formal -fp -fp2 -fp2_gleam -fp_gl -fresnel -fswalk -functx -funtil -gacache -galant -gap -garnet_tool -gary -gbase32_clockwork -gcalc -gchess -gemqtt -gen_core_erlang -gen_gleam -geny -germinal -ggleam -gild -gild_frontend -gip -gjwt -gl -glacier -glacier_gleeunit -gladvent -glailglind -glam -glame -glaml -glance -glance_printer -glanoid -glare -glatch -glats -glatus -glcode -gleaf -gleam -gleam_bbmustache -gleam_bitwise -gleam_bson -gleam_community_ansi -gleam_community_colour -gleam_community_maths -gleam_community_path -gleam_cors -gleam_cowboy -gleam_cowboy_websockets -gleam_crypto -gleam_deno -gleam_dotenv -gleam_elli -gleam_email -gleam_erlang -gleam_erlexec -gleam_fetch -gleam_gun -gleam_hackney -gleam_hexpm -gleam_html -gleam_http -gleam_httpc -gleam_javascript -gleam_json -gleam_module_javascript_test -gleam_mongo -gleam_nodejs -gleam_os_mon -gleam_otp -gleam_package_interface -gleam_pgo -gleam_qs -gleam_sendgrid -gleam_stats -gleam_stdlib -gleam_synapses -gleam_tailwind -gleam_tcp -gleam_test -gleam_toml -gleam_xml -gleam_yaml -gleam_zlists -gleambox -gleamix -gleamql -gleamsver -gleamy_bench -gleamy_structures -gleamyshell -gleanix -glearray -gleastsq -gleative -gleb128 -glector -gledis -gledo -gleebor -gleenix -gleepl -gleescript -gleesend -gleeunit -gleez -gleither -glemini -glemo -glemplate -glemtext -glen -glen_node -glency -glentities -glenv -glenvy -glerd -glerd_json -glerd_valid -glerm -gleroglero -glesha -glesha2 -glevatar 
-glevenshtein -glex -glexec -glexer -glexif -glib -gliberapay -glibsql -gliew -glimiter -glimmer -glimt -gling -glint -glisbn -glisdigit -glisten -glistix_gleeunit -glistix_nix -glitch -glitter -glittr -glitzer -gliua -globe -glog -glome -gloml -glomp -gloom -glormat -gloss -glotel -glove -glow -glow_auth -glubs -glubsub -glucose -glue -gluid -gluon -gluple -glv8 -glx -glychee -glyph -glyph_codegen -glzoneinfo -gmysql -go_over -gopenai -gpsd_json -gpxb -grammy -gramps -graph -grille_pain -gripe -gserde -gstripe -gsv -gtempo -gts -gtui -gu -gwitch -gwr -gwt -gxid -gzlib -halo -handles -hardcache -hello_joe -howdy -howdy_authentication_cookies -howdy_uuid -htmb -htmgrrrl -html_components -html_dsl -html_lustre_converter -html_parser -htmz -httpp -hug -humanise -hyphenation -ids -ieee_float -illustrious -immutable_lru -integer_complexity -ior -iox -iso_8859 -ivy -jackson -jasper -javascript_dom_parser -jbs -jot -json_canvas -juno -justin -keccak_gleam -kick -kielet -kirala_bbmarkdown -kirala_l4u -kirala_markdown -kreator -libsql -lite_fs -logging -lotta -lumi -lustre -lustre_animation -lustre_carousel -lustre_dev_tools -lustre_hash_state -lustre_http -lustre_hx -lustre_limiter -lustre_routed -lustre_ssg -lustre_transition -lustre_ui -lustre_virtual_list -lustre_websocket -lzf_gleam -marceau -mat -meadow -melon -midas -migrant -mineflayer -minigen -mist -mockth -modem -monies -morse_code_translator -mote -mug -mumu -mungo -nakai -nanoworker -nbeet -nerf -nessie -nessie_cluster -ngs -nibble -nimiq_gleam -node_socket_client -node_tags -non_empty_list -novdom -novdom_dev_tools -novdom_testing -observatory -open_color -openfeature -opt_args_with_defs_for_gleam -oteap -outcome -outil -owoify_gleam -p5js_gleam -palindrome -panel -parallel_map -parser_gleam -party -parz -pb_lite -pears -peggy -phonetic_gleam -phony -phosphor_lustre -pickle -pika_id -pine -pink -plex_pin_auth -plinth -plunk -pngleam -pojo -pona -postgresql_protocol -pprint -prequel -pretty_diff -priorityq -prng -process_groups -process_waiter -processgroups -promgleam -psg -puddle -punycode -qcheck -qcheck_gleeunit_utils -qs -queryb -question -rad -rada -radiate -radish -radish_fork -ramble -ranged_int -ranger -rank -react_gleam -reactive_signal -ream -recursive -redraw -redraw_dom -ref -rememo -remote_data -render_md -repeatedly -rizzo -runetracer -scaffold_gleam -scriptorium -sequin -shakespeare -shamir -sheen -shellout -shimmer -showtime -signal -signal_pgo -simple_pubsub -simplifile -singularity -sketch -sketch_css -sketch_lustre -slackin -snag -snowgleam -sol -sparkle -spinner -sprinkle -sprocket -sqlight -squirrel -stacky -staff_ai -starmap -startest -stdin -stego -stoiridh_version -storch -stratus -string_format -sturnidae -sunny -surreal_gleam -survey -swen_jwt -systemd_status -tardis -tcpea -telega -temporary -term_size -testbldr -testcontainers_gleam -the_stars -tinyroute -tom -tote -translate -transparent_http -trie_again -trust -tubes -tulip -tupler -typed_headers -valid -validate_monadic -varasto -vindaloo -vleam -wasmify -weapp -webls -webmidi -wechat_dev_tools -wemote -wimp -wink -wisp -wisp_flash -wolf -worm -wp_tables -xmb -xmleam -xmlm -ygleam -youid -zeptomail -zip_list diff --git a/extensions/gleam/src/gleam.rs b/extensions/gleam/src/gleam.rs deleted file mode 100644 index a40111f79341cf..00000000000000 --- a/extensions/gleam/src/gleam.rs +++ /dev/null @@ -1,249 +0,0 @@ -mod hexdocs; - -use std::fs; -use std::sync::LazyLock; -use zed::lsp::CompletionKind; -use zed::{ - CodeLabel, CodeLabelSpan, KeyValueStore, 
LanguageServerId, SlashCommand, SlashCommandOutput, - SlashCommandOutputSection, -}; -use zed_extension_api::{self as zed, Result}; - -struct GleamExtension { - cached_binary_path: Option, -} - -impl GleamExtension { - fn language_server_binary_path( - &mut self, - language_server_id: &LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - if let Some(path) = worktree.which("gleam") { - return Ok(path); - } - - if let Some(path) = &self.cached_binary_path { - if fs::metadata(path).map_or(false, |stat| stat.is_file()) { - return Ok(path.clone()); - } - } - - zed::set_language_server_installation_status( - language_server_id, - &zed::LanguageServerInstallationStatus::CheckingForUpdate, - ); - let release = zed::latest_github_release( - "gleam-lang/gleam", - zed::GithubReleaseOptions { - require_assets: true, - pre_release: false, - }, - )?; - - let (platform, arch) = zed::current_platform(); - let asset_name = format!( - "gleam-{version}-{arch}-{os}.tar.gz", - version = release.version, - arch = match arch { - zed::Architecture::Aarch64 => "aarch64", - zed::Architecture::X86 => "x86", - zed::Architecture::X8664 => "x86_64", - }, - os = match platform { - zed::Os::Mac => "apple-darwin", - zed::Os::Linux => "unknown-linux-musl", - zed::Os::Windows => "pc-windows-msvc", - }, - ); - - let asset = release - .assets - .iter() - .find(|asset| asset.name == asset_name) - .ok_or_else(|| format!("no asset found matching {:?}", asset_name))?; - - let version_dir = format!("gleam-{}", release.version); - let binary_path = format!("{version_dir}/gleam"); - - if !fs::metadata(&binary_path).map_or(false, |stat| stat.is_file()) { - zed::set_language_server_installation_status( - language_server_id, - &zed::LanguageServerInstallationStatus::Downloading, - ); - - zed::download_file( - &asset.download_url, - &version_dir, - zed::DownloadedFileType::GzipTar, - ) - .map_err(|e| format!("failed to download file: {e}"))?; - - let entries = - fs::read_dir(".").map_err(|e| format!("failed to list working directory {e}"))?; - for entry in entries { - let entry = entry.map_err(|e| format!("failed to load directory entry {e}"))?; - if entry.file_name().to_str() != Some(&version_dir) { - fs::remove_dir_all(entry.path()).ok(); - } - } - } - - self.cached_binary_path = Some(binary_path.clone()); - Ok(binary_path) - } -} - -impl zed::Extension for GleamExtension { - fn new() -> Self { - Self { - cached_binary_path: None, - } - } - - fn language_server_command( - &mut self, - language_server_id: &LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - Ok(zed::Command { - command: self.language_server_binary_path(language_server_id, worktree)?, - args: vec!["lsp".to_string()], - env: Default::default(), - }) - } - - fn label_for_completion( - &self, - _language_server_id: &LanguageServerId, - completion: zed::lsp::Completion, - ) -> Option { - let name = &completion.label; - let ty = strip_newlines_from_detail(&completion.detail?); - let let_binding = "let a"; - let colon = ": "; - let assignment = " = "; - let call = match completion.kind? 
{ - CompletionKind::Function | CompletionKind::Constructor => "()", - _ => "", - }; - let code = format!("{let_binding}{colon}{ty}{assignment}{name}{call}"); - - Some(CodeLabel { - spans: vec![ - CodeLabelSpan::code_range({ - let start = let_binding.len() + colon.len() + ty.len() + assignment.len(); - start..start + name.len() - }), - CodeLabelSpan::code_range({ - let start = let_binding.len(); - start..start + colon.len() - }), - CodeLabelSpan::code_range({ - let start = let_binding.len() + colon.len(); - start..start + ty.len() - }), - ], - filter_range: (0..name.len()).into(), - code, - }) - } - - fn run_slash_command( - &self, - command: SlashCommand, - _args: Vec, - worktree: Option<&zed::Worktree>, - ) -> Result { - match command.name.as_str() { - "gleam-project" => { - let worktree = worktree.ok_or("no worktree")?; - - let mut text = String::new(); - text.push_str("You are in a Gleam project.\n"); - - if let Ok(gleam_toml) = worktree.read_text_file("gleam.toml") { - text.push_str("The `gleam.toml` is as follows:\n"); - text.push_str(&gleam_toml); - } - - Ok(SlashCommandOutput { - sections: vec![SlashCommandOutputSection { - range: (0..text.len()).into(), - label: "gleam-project".to_string(), - }], - text, - }) - } - command => Err(format!("unknown slash command: \"{command}\"")), - } - } - - fn suggest_docs_packages(&self, provider: String) -> Result, String> { - match provider.as_str() { - "gleam-hexdocs" => { - static GLEAM_PACKAGES: LazyLock> = LazyLock::new(|| { - include_str!("../packages.txt") - .lines() - .filter(|line| !line.starts_with('#')) - .map(|line| line.trim().to_owned()) - .collect() - }); - - Ok(GLEAM_PACKAGES.clone()) - } - _ => Ok(Vec::new()), - } - } - - fn index_docs( - &self, - provider: String, - package: String, - database: &KeyValueStore, - ) -> Result<(), String> { - match provider.as_str() { - "gleam-hexdocs" => hexdocs::index(package, database), - _ => Ok(()), - } - } -} - -zed::register_extension!(GleamExtension); - -/// Removes newlines from the completion detail. -/// -/// The Gleam LSP can return types containing newlines, which causes formatting -/// issues within the Zed completions menu. 
-fn strip_newlines_from_detail(detail: &str) -> String { - let without_newlines = detail - .replace("->\n ", "-> ") - .replace("\n ", "") - .replace(",\n", ""); - - let comma_delimited_parts = without_newlines.split(','); - comma_delimited_parts - .map(|part| part.trim()) - .collect::>() - .join(", ") -} - -#[cfg(test)] -mod tests { - use crate::strip_newlines_from_detail; - - #[test] - fn test_strip_newlines_from_detail() { - let detail = "fn(\n Selector(a),\n b,\n fn(Dynamic, Dynamic, Dynamic, Dynamic, Dynamic, Dynamic, Dynamic) -> a,\n) -> Selector(a)"; - let expected = "fn(Selector(a), b, fn(Dynamic, Dynamic, Dynamic, Dynamic, Dynamic, Dynamic, Dynamic) -> a) -> Selector(a)"; - assert_eq!(strip_newlines_from_detail(detail), expected); - - let detail = "fn(Selector(a), b, fn(Dynamic, Dynamic, Dynamic, Dynamic, Dynamic, Dynamic) -> a) ->\n Selector(a)"; - let expected = "fn(Selector(a), b, fn(Dynamic, Dynamic, Dynamic, Dynamic, Dynamic, Dynamic) -> a) -> Selector(a)"; - assert_eq!(strip_newlines_from_detail(detail), expected); - - let detail = "fn(\n Method,\n List(#(String, String)),\n a,\n Scheme,\n String,\n Option(Int),\n String,\n Option(String),\n) -> Request(a)"; - let expected = "fn(Method, List(#(String, String)), a, Scheme, String, Option(Int), String, Option(String)) -> Request(a)"; - assert_eq!(strip_newlines_from_detail(detail), expected); - } -} diff --git a/extensions/gleam/src/hexdocs.rs b/extensions/gleam/src/hexdocs.rs deleted file mode 100644 index 1b6b073a61b418..00000000000000 --- a/extensions/gleam/src/hexdocs.rs +++ /dev/null @@ -1,215 +0,0 @@ -use std::cell::RefCell; -use std::collections::BTreeSet; -use std::io::{self, Read}; -use std::rc::Rc; - -use html_to_markdown::markdown::{ - HeadingHandler, ListHandler, ParagraphHandler, StyledTextHandler, TableHandler, -}; -use html_to_markdown::{ - convert_html_to_markdown, HandleTag, HandlerOutcome, HtmlElement, MarkdownWriter, - StartTagOutcome, TagHandler, -}; -use zed_extension_api::{ - http_client::{HttpMethod, HttpRequest, RedirectPolicy}, - KeyValueStore, Result, -}; - -pub fn index(package: String, database: &KeyValueStore) -> Result<()> { - let headers = vec![( - "User-Agent".to_string(), - "Zed (Gleam Extension)".to_string(), - )]; - - let response = HttpRequest::builder() - .method(HttpMethod::Get) - .url(format!("https://hexdocs.pm/{package}")) - .headers(headers.clone()) - .redirect_policy(RedirectPolicy::FollowAll) - .build()? - .fetch()?; - - let (package_root_markdown, modules) = - convert_hexdocs_to_markdown(&mut io::Cursor::new(&response.body))?; - - database.insert(&package, &package_root_markdown)?; - - for module in modules { - let response = HttpRequest::builder() - .method(HttpMethod::Get) - .url(format!("https://hexdocs.pm/{package}/{module}.html")) - .headers(headers.clone()) - .redirect_policy(RedirectPolicy::FollowAll) - .build()? 
- .fetch()?; - - let (markdown, _modules) = - convert_hexdocs_to_markdown(&mut io::Cursor::new(&response.body))?; - - database.insert(&format!("{module} ({package})"), &markdown)?; - } - - Ok(()) -} - -pub fn convert_hexdocs_to_markdown(html: impl Read) -> Result<(String, Vec)> { - let module_collector = Rc::new(RefCell::new(GleamModuleCollector::new())); - - let mut handlers: Vec = vec![ - module_collector.clone(), - Rc::new(RefCell::new(GleamChromeRemover)), - Rc::new(RefCell::new(NavSkipper::new(ParagraphHandler))), - Rc::new(RefCell::new(NavSkipper::new(HeadingHandler))), - Rc::new(RefCell::new(NavSkipper::new(ListHandler))), - Rc::new(RefCell::new(NavSkipper::new(TableHandler::new()))), - Rc::new(RefCell::new(NavSkipper::new(StyledTextHandler))), - ]; - - let markdown = convert_html_to_markdown(html, &mut handlers) - .map_err(|err| format!("failed to convert docs to Markdown {err}"))?; - - let modules = module_collector - .borrow() - .modules - .iter() - .cloned() - .collect::>(); - - Ok((markdown, modules)) -} - -/// A higher-order handler that skips all content from the `nav`. -/// -/// We still need to traverse the `nav` for collecting information, but -/// we don't want to include any of its content in the resulting Markdown. -pub struct NavSkipper { - handler: T, -} - -impl NavSkipper { - pub fn new(handler: T) -> Self { - Self { handler } - } -} - -impl HandleTag for NavSkipper { - fn should_handle(&self, tag: &str) -> bool { - tag == "nav" || self.handler.should_handle(tag) - } - - fn handle_tag_start( - &mut self, - tag: &HtmlElement, - writer: &mut MarkdownWriter, - ) -> StartTagOutcome { - if writer.is_inside("nav") { - return StartTagOutcome::Continue; - } - - self.handler.handle_tag_start(tag, writer) - } - - fn handle_tag_end(&mut self, tag: &HtmlElement, writer: &mut MarkdownWriter) { - if writer.is_inside("nav") { - return; - } - - self.handler.handle_tag_end(tag, writer) - } - - fn handle_text(&mut self, text: &str, writer: &mut MarkdownWriter) -> HandlerOutcome { - if writer.is_inside("nav") { - return HandlerOutcome::Handled; - } - - self.handler.handle_text(text, writer) - } -} - -pub struct GleamChromeRemover; - -impl HandleTag for GleamChromeRemover { - fn should_handle(&self, tag: &str) -> bool { - matches!( - tag, - "head" | "script" | "style" | "svg" | "header" | "footer" | "a" - ) - } - - fn handle_tag_start( - &mut self, - tag: &HtmlElement, - _writer: &mut MarkdownWriter, - ) -> StartTagOutcome { - match tag.tag() { - "head" | "script" | "style" | "svg" | "header" | "footer" => { - return StartTagOutcome::Skip; - } - "a" => { - if tag.attr("onclick").is_some() { - return StartTagOutcome::Skip; - } - } - _ => {} - } - - StartTagOutcome::Continue - } -} - -pub struct GleamModuleCollector { - modules: BTreeSet, - has_seen_modules_header: bool, -} - -impl GleamModuleCollector { - pub fn new() -> Self { - Self { - modules: BTreeSet::new(), - has_seen_modules_header: false, - } - } - - fn parse_module(tag: &HtmlElement) -> Option { - if tag.tag() != "a" { - return None; - } - - let href = tag.attr("href")?; - if href.starts_with('#') || href.starts_with("https://") || href.starts_with("../") { - return None; - } - - let module_name = href.trim_start_matches("./").trim_end_matches(".html"); - - Some(module_name.to_owned()) - } -} - -impl HandleTag for GleamModuleCollector { - fn should_handle(&self, tag: &str) -> bool { - matches!(tag, "h2" | "a") - } - - fn handle_tag_start( - &mut self, - tag: &HtmlElement, - writer: &mut MarkdownWriter, - ) -> StartTagOutcome { 
- if tag.tag() == "a" && self.has_seen_modules_header && writer.is_inside("li") { - if let Some(module_name) = Self::parse_module(tag) { - self.modules.insert(module_name); - } - } - - StartTagOutcome::Continue - } - - fn handle_text(&mut self, text: &str, writer: &mut MarkdownWriter) -> HandlerOutcome { - if writer.is_inside("nav") && writer.is_inside("h2") && text == "Modules" { - self.has_seen_modules_header = true; - } - - HandlerOutcome::NoOp - } -} diff --git a/extensions/html/Cargo.toml b/extensions/html/Cargo.toml index 084be13a70b3aa..1182356015db29 100644 --- a/extensions/html/Cargo.toml +++ b/extensions/html/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_html" -version = "0.1.2" +version = "0.1.3" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/html/extension.toml b/extensions/html/extension.toml index 9add7f8289d240..12b1020e3bad14 100644 --- a/extensions/html/extension.toml +++ b/extensions/html/extension.toml @@ -1,7 +1,7 @@ id = "html" name = "HTML" description = "HTML support." -version = "0.1.2" +version = "0.1.3" schema_version = 1 authors = ["Isaac Clayton "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/html/languages/html/highlights.scm b/extensions/html/languages/html/highlights.scm index e2b8e35bf4a790..6bb0c23374b8f4 100644 --- a/extensions/html/languages/html/highlights.scm +++ b/extensions/html/languages/html/highlights.scm @@ -1,6 +1,6 @@ -(tag_name) @keyword +(tag_name) @tag (erroneous_end_tag_name) @keyword -(doctype) @constant +(doctype) @tag.doctype (attribute_name) @property (attribute_value) @string (comment) @comment diff --git a/extensions/lua/Cargo.toml b/extensions/lua/Cargo.toml index ace7f4700caebf..f577ce18712c4b 100644 --- a/extensions/lua/Cargo.toml +++ b/extensions/lua/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_lua" -version = "0.0.3" +version = "0.1.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/lua/extension.toml b/extensions/lua/extension.toml index cd00bbc7c1ad7a..82026f48ba90dc 100644 --- a/extensions/lua/extension.toml +++ b/extensions/lua/extension.toml @@ -1,7 +1,7 @@ id = "lua" name = "Lua" description = "Lua support." -version = "0.0.3" +version = "0.1.0" schema_version = 1 authors = ["Max Brunsfeld "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/lua/languages/lua/config.toml b/extensions/lua/languages/lua/config.toml index 6c3aee09ea71b2..7ec8ef2f03f549 100644 --- a/extensions/lua/languages/lua/config.toml +++ b/extensions/lua/languages/lua/config.toml @@ -8,6 +8,6 @@ brackets = [ { start = "[", end = "]", close = true, newline = true }, { start = "(", end = ")", close = true, newline = true }, { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] }, - { start = "'", end = "'", close = false, newline = false, not_in = ["string"] }, + { start = "'", end = "'", close = true, newline = false, not_in = ["string"] }, ] collapsed_placeholder = "--[ ... 
]--" diff --git a/extensions/lua/languages/lua/highlights.scm b/extensions/lua/languages/lua/highlights.scm index 98e2c2eaff6380..7b0b8364ea2d38 100644 --- a/extensions/lua/languages/lua/highlights.scm +++ b/extensions/lua/languages/lua/highlights.scm @@ -196,3 +196,4 @@ (number) @number (string) @string +(escape_sequence) @string.escape diff --git a/extensions/ocaml/Cargo.toml b/extensions/ocaml/Cargo.toml index 6e07327036e82c..6df98bec4cfb83 100644 --- a/extensions/ocaml/Cargo.toml +++ b/extensions/ocaml/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_ocaml" -version = "0.0.2" +version = "0.1.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/ocaml/extension.toml b/extensions/ocaml/extension.toml index 99f83d126307c2..bff7c380b5523d 100644 --- a/extensions/ocaml/extension.toml +++ b/extensions/ocaml/extension.toml @@ -1,7 +1,7 @@ id = "ocaml" name = "OCaml" description = "OCaml support." -version = "0.0.2" +version = "0.1.0" schema_version = 1 authors = ["Rashid Almheiri <69181766+huwaireb@users.noreply.github.com>"] repository = "https://github.com/zed-industries/zed" @@ -19,3 +19,7 @@ path = "grammars/ocaml" repository = "https://github.com/tree-sitter/tree-sitter-ocaml" commit = "0b12614ded3ec7ed7ab7933a9ba4f695ba4c342e" path = "grammars/interface" + +[grammars.dune] +repository = "https://github.com/WHForks/tree-sitter-dune" +commit = "b3f7882e1b9a1d8811011bf6f0de1c74c9c93949" diff --git a/extensions/ocaml/languages/dune/config.toml b/extensions/ocaml/languages/dune/config.toml new file mode 100644 index 00000000000000..b4f79850b64336 --- /dev/null +++ b/extensions/ocaml/languages/dune/config.toml @@ -0,0 +1,8 @@ +name = "Dune" +grammar = "dune" +path_suffixes = ["dune", "dune-project"] +brackets = [ + { start = "(", end = ")", close = true, newline = true }, + { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] } +] +tab_size = 2 diff --git a/extensions/ocaml/languages/dune/highlights.scm b/extensions/ocaml/languages/dune/highlights.scm new file mode 100644 index 00000000000000..e7a21cd2c51c03 --- /dev/null +++ b/extensions/ocaml/languages/dune/highlights.scm @@ -0,0 +1,5 @@ +(stanza_name) @function +(field_name) @property +(quoted_string) @string +(multiline_string) @string +(action_name) @keyword diff --git a/extensions/ocaml/languages/dune/injections.scm b/extensions/ocaml/languages/dune/injections.scm new file mode 100644 index 00000000000000..654b5b2c13967a --- /dev/null +++ b/extensions/ocaml/languages/dune/injections.scm @@ -0,0 +1,2 @@ +((ocaml_syntax) @injection.content + (#set! injection.language "ocaml")) diff --git a/extensions/perplexity/README.md b/extensions/perplexity/README.md new file mode 100644 index 00000000000000..405356dc535a36 --- /dev/null +++ b/extensions/perplexity/README.md @@ -0,0 +1,43 @@ +# Zed Perplexity Extension + +This example extension adds the `/perplexity` [slash command](https://zed.dev/docs/assistant/commands) to the Zed AI assistant. + +## Usage + +Open the AI Assistant panel (`cmd-r` or `ctrl-r`) and enter: + +``` +/perplexity What's the weather in Boulder, CO tomorrow evening? +``` + +## Development Setup + +1. Install the Rust toolchain and clone the zed repo: + + ``` + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh + + mkdir -p ~/code + cd ~/code + git clone https://github.com/zed-industries/zed + ``` + +1. Open Zed +1. Open Zed Extensions (`cmd-shift-x` / `ctrl-shift-x`) +1. Click "Install Dev Extension" +1. 
Navigate to the "extensions/perplexity" folder inside the zed git repo. +1. Ensure your `PERPLEXITY_API_KEY` environment variable is set (instructions below) + + ```sh + env | grep PERPLEXITY_API_KEY + ``` + +1. Quit and relaunch Zed + +## PERPLEXITY_API_KEY + +This extension requires a Perplexity API key to be available via the `PERPLEXITY_API_KEY` environment variable. + +To onbtain a Perplexity.ai API token, login to your Perplexity.ai account and go [Settings->API](https://www.perplexity.ai/settings/api) and under "API Keys" click "Generate". This will require you to have [Perplexity Pro](https://www.perplexity.ai/pro) or to buy API credits. By default the extension uses `llama-3.1-sonar-small-128k-online`, currently cheapest model available which is roughly half a penny per request + a penny per 50,000 tokens. So most requests will cost less than $0.01 USD. + +Take your API key and add it to your environment by adding `export PERPLEXITY_API_KEY="pplx-0123456789abcdef..."` to your `~/.zshrc` or `~/.bashrc`. Reload close and reopen your terminal session. Check with `env |grep PERPLEXITY_API_KEY`. diff --git a/extensions/php/Cargo.toml b/extensions/php/Cargo.toml index 91f309d9de07ad..9726c6f0d59e90 100644 --- a/extensions/php/Cargo.toml +++ b/extensions/php/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_php" -version = "0.1.3" +version = "0.2.1" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/php/extension.toml b/extensions/php/extension.toml index cb30eb5f0cd55f..a59c5bab53784c 100644 --- a/extensions/php/extension.toml +++ b/extensions/php/extension.toml @@ -1,7 +1,7 @@ id = "php" name = "PHP" description = "PHP support." -version = "0.1.3" +version = "0.2.1" schema_version = 1 authors = ["Piotr Osiewicz "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/php/languages/php/highlights.scm b/extensions/php/languages/php/highlights.scm index ab1fdf662cf695..6afeb1090b7a0b 100644 --- a/extensions/php/languages/php/highlights.scm +++ b/extensions/php/languages/php/highlights.scm @@ -133,8 +133,5 @@ "trait" @keyword "try" @keyword "use" @keyword -"var" @keyword "while" @keyword "xor" @keyword -"yield" @keyword -"yield from" @keyword diff --git a/extensions/php/languages/php/indents.scm b/extensions/php/languages/php/indents.scm new file mode 100644 index 00000000000000..e9754690920500 --- /dev/null +++ b/extensions/php/languages/php/indents.scm @@ -0,0 +1 @@ +(_ "{" "}" @end) @indent diff --git a/extensions/php/languages/php/injections.scm b/extensions/php/languages/php/injections.scm index 3b5cd70064c42c..122d0b377af701 100644 --- a/extensions/php/languages/php/injections.scm +++ b/extensions/php/languages/php/injections.scm @@ -5,3 +5,5 @@ ((comment) @content (#match? @content "^/\\*\\*[^*]") (#set! 
"language" "phpdoc")) + +((heredoc_body) (heredoc_end) @language) @content diff --git a/extensions/php/src/language_servers/intelephense.rs b/extensions/php/src/language_servers/intelephense.rs index 7bd66b24abfaff..23f47ac5c06344 100644 --- a/extensions/php/src/language_servers/intelephense.rs +++ b/extensions/php/src/language_servers/intelephense.rs @@ -1,5 +1,6 @@ use std::{env, fs}; +use zed::{CodeLabel, CodeLabelSpan}; use zed_extension_api::settings::LspSettings; use zed_extension_api::{self as zed, serde_json, LanguageServerId, Result}; @@ -104,4 +105,105 @@ impl Intelephense { "intelephense": settings }))) } + + pub fn label_for_completion(&self, completion: zed::lsp::Completion) -> Option { + let label = &completion.label; + + match completion.kind? { + zed::lsp::CompletionKind::Method => { + // __construct method doesn't have a detail + if let Some(ref detail) = completion.detail { + if detail.is_empty() { + return Some(CodeLabel { + spans: vec![ + CodeLabelSpan::literal(label, Some("function.method".to_string())), + CodeLabelSpan::literal("()", None), + ], + filter_range: (0..label.len()).into(), + code: completion.label, + }); + } + } + + let mut parts = completion.detail.as_ref()?.split(":"); + // E.g., `foo(string $var)` + let name_and_params = parts.next()?; + let return_type = parts.next()?.trim(); + + let (_, params) = name_and_params.split_once("(")?; + let params = params.trim_end_matches(")"); + + Some(CodeLabel { + spans: vec![ + CodeLabelSpan::literal(label, Some("function.method".to_string())), + CodeLabelSpan::literal("(", None), + CodeLabelSpan::literal(params, Some("comment".to_string())), + CodeLabelSpan::literal("): ", None), + CodeLabelSpan::literal(return_type, Some("type".to_string())), + ], + filter_range: (0..label.len()).into(), + code: completion.label, + }) + } + zed::lsp::CompletionKind::Constant | zed::lsp::CompletionKind::EnumMember => { + if let Some(ref detail) = completion.detail { + if !detail.is_empty() { + return Some(CodeLabel { + spans: vec![ + CodeLabelSpan::literal(label, Some("constant".to_string())), + CodeLabelSpan::literal(" ", None), + CodeLabelSpan::literal(detail, Some("comment".to_string())), + ], + filter_range: (0..label.len()).into(), + code: completion.label, + }); + } + } + + Some(CodeLabel { + spans: vec![CodeLabelSpan::literal(label, Some("constant".to_string()))], + filter_range: (0..label.len()).into(), + code: completion.label, + }) + } + zed::lsp::CompletionKind::Property => { + let return_type = completion.detail?; + Some(CodeLabel { + spans: vec![ + CodeLabelSpan::literal(label, Some("attribute".to_string())), + CodeLabelSpan::literal(": ", None), + CodeLabelSpan::literal(return_type, Some("type".to_string())), + ], + filter_range: (0..label.len()).into(), + code: completion.label, + }) + } + zed::lsp::CompletionKind::Variable => { + // See https://www.php.net/manual/en/reserved.variables.php + const SYSTEM_VAR_NAMES: &[&str] = + &["argc", "argv", "php_errormsg", "http_response_header"]; + + let var_name = completion.label.trim_start_matches("$"); + let is_uppercase = var_name + .chars() + .filter(|c| c.is_alphabetic()) + .all(|c| c.is_uppercase()); + let is_system_constant = var_name.starts_with("_"); + let is_reserved = SYSTEM_VAR_NAMES.contains(&var_name); + + let highlight = if is_uppercase || is_system_constant || is_reserved { + Some("comment".to_string()) + } else { + None + }; + + Some(CodeLabel { + spans: vec![CodeLabelSpan::literal(label, highlight)], + filter_range: (0..label.len()).into(), + code: 
completion.label, + }) + } + _ => None, + } + } } diff --git a/extensions/php/src/php.rs b/extensions/php/src/php.rs index 7157bef07432f8..53b4c299516241 100644 --- a/extensions/php/src/php.rs +++ b/extensions/php/src/php.rs @@ -1,5 +1,6 @@ mod language_servers; +use zed::CodeLabel; use zed_extension_api::{self as zed, serde_json, LanguageServerId, Result}; use crate::language_servers::{Intelephense, Phpactor}; @@ -53,6 +54,19 @@ impl zed::Extension for PhpExtension { Ok(None) } + + fn label_for_completion( + &self, + language_server_id: &zed::LanguageServerId, + completion: zed::lsp::Completion, + ) -> Option { + match language_server_id.as_ref() { + Intelephense::LANGUAGE_SERVER_ID => { + self.intelephense.as_ref()?.label_for_completion(completion) + } + _ => None, + } + } } zed::register_extension!(PhpExtension); diff --git a/extensions/ruby/Cargo.toml b/extensions/proto/Cargo.toml similarity index 81% rename from extensions/ruby/Cargo.toml rename to extensions/proto/Cargo.toml index 2d9ef229af816a..215a09f896c1df 100644 --- a/extensions/ruby/Cargo.toml +++ b/extensions/proto/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "zed_ruby" +name = "zed_proto" version = "0.2.0" edition = "2021" publish = false @@ -9,7 +9,7 @@ license = "Apache-2.0" workspace = true [lib] -path = "src/ruby.rs" +path = "src/proto.rs" crate-type = ["cdylib"] [dependencies] diff --git a/extensions/gleam/LICENSE-APACHE b/extensions/proto/LICENSE-APACHE similarity index 100% rename from extensions/gleam/LICENSE-APACHE rename to extensions/proto/LICENSE-APACHE diff --git a/extensions/proto/extension.toml b/extensions/proto/extension.toml new file mode 100644 index 00000000000000..f26aee7dde5c3c --- /dev/null +++ b/extensions/proto/extension.toml @@ -0,0 +1,15 @@ +id = "proto" +name = "Proto" +description = "Protocol Buffers support." 
+version = "0.2.0" +schema_version = 1 +authors = ["Zed Industries "] +repository = "https://github.com/zed-industries/zed" + +[grammars.proto] +repository = "https://github.com/zed-industries/tree-sitter-proto" +commit = "0848bd30a64be48772e15fbb9d5ba8c0cc5772ad" + +[language_servers.protobuf-language-server] +name = "Protobuf Language Server" +languages = ["Proto"] diff --git a/crates/languages/src/proto/config.toml b/extensions/proto/languages/proto/config.toml similarity index 97% rename from crates/languages/src/proto/config.toml rename to extensions/proto/languages/proto/config.toml index b8bccfd39b6a39..6d25c23da5dfaa 100644 --- a/crates/languages/src/proto/config.toml +++ b/extensions/proto/languages/proto/config.toml @@ -1,4 +1,4 @@ -name = "proto" +name = "Proto" grammar = "proto" path_suffixes = ["proto"] line_comments = ["// "] diff --git a/crates/languages/src/proto/highlights.scm b/extensions/proto/languages/proto/highlights.scm similarity index 100% rename from crates/languages/src/proto/highlights.scm rename to extensions/proto/languages/proto/highlights.scm diff --git a/crates/languages/src/proto/outline.scm b/extensions/proto/languages/proto/outline.scm similarity index 100% rename from crates/languages/src/proto/outline.scm rename to extensions/proto/languages/proto/outline.scm diff --git a/extensions/proto/src/proto.rs b/extensions/proto/src/proto.rs new file mode 100644 index 00000000000000..c692a0932766b4 --- /dev/null +++ b/extensions/proto/src/proto.rs @@ -0,0 +1,64 @@ +use zed_extension_api::{self as zed, settings::LspSettings, Result}; + +const PROTOBUF_LANGUAGE_SERVER_NAME: &str = "protobuf-language-server"; + +struct ProtobufLanguageServerBinary { + path: String, + args: Option>, +} + +struct ProtobufExtension; + +impl ProtobufExtension { + fn language_server_binary( + &self, + _language_server_id: &zed::LanguageServerId, + worktree: &zed::Worktree, + ) -> Result { + let binary_settings = LspSettings::for_worktree("protobuf-language-server", worktree) + .ok() + .and_then(|lsp_settings| lsp_settings.binary); + let binary_args = binary_settings + .as_ref() + .and_then(|binary_settings| binary_settings.arguments.clone()); + + if let Some(path) = binary_settings.and_then(|binary_settings| binary_settings.path) { + return Ok(ProtobufLanguageServerBinary { + path, + args: binary_args, + }); + } + + if let Some(path) = worktree.which(PROTOBUF_LANGUAGE_SERVER_NAME) { + return Ok(ProtobufLanguageServerBinary { + path, + args: binary_args, + }); + } + + Err(format!("{PROTOBUF_LANGUAGE_SERVER_NAME} not found in PATH",)) + } +} + +impl zed::Extension for ProtobufExtension { + fn new() -> Self { + Self + } + + fn language_server_command( + &mut self, + language_server_id: &zed_extension_api::LanguageServerId, + worktree: &zed_extension_api::Worktree, + ) -> zed_extension_api::Result { + let binary = self.language_server_binary(language_server_id, worktree)?; + Ok(zed::Command { + command: binary.path, + args: binary + .args + .unwrap_or_else(|| vec!["-logs".into(), "".into()]), + env: Default::default(), + }) + } +} + +zed::register_extension!(ProtobufExtension); diff --git a/extensions/racket/languages/racket/highlights.scm b/extensions/racket/languages/racket/highlights.scm index 3caf1d88e97dd0..0b462a9c261f46 100644 --- a/extensions/racket/languages/racket/highlights.scm +++ b/extensions/racket/languages/racket/highlights.scm @@ -4,7 +4,7 @@ (here_string) (byte_string)] @string (regex) @string.regex -(escape_sequence) @escape +(escape_sequence) @string.escape [(comment) 
(block_comment) diff --git a/extensions/ruby/LICENSE-APACHE b/extensions/ruby/LICENSE-APACHE deleted file mode 120000 index 1cd601d0a3affa..00000000000000 --- a/extensions/ruby/LICENSE-APACHE +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-APACHE \ No newline at end of file diff --git a/extensions/ruby/extension.toml b/extensions/ruby/extension.toml deleted file mode 100644 index 6862069e7226d9..00000000000000 --- a/extensions/ruby/extension.toml +++ /dev/null @@ -1,31 +0,0 @@ -id = "ruby" -name = "Ruby" -description = "Ruby support." -version = "0.2.0" -schema_version = 1 -authors = ["Vitaly Slobodin "] -repository = "https://github.com/zed-industries/zed" - -[language_servers.solargraph] -name = "Solargraph" -languages = ["Ruby"] - -[language_servers.ruby-lsp] -name = "Ruby LSP" -languages = ["Ruby", "ERB"] - -[language_servers.rubocop] -name = "Rubocop" -languages = ["Ruby"] - -[grammars.ruby] -repository = "https://github.com/tree-sitter/tree-sitter-ruby" -commit = "7dbc1e2d0e2d752577655881f73b4573f3fe85d4" - -[grammars.embedded_template] -repository = "https://github.com/tree-sitter/tree-sitter-embedded-template" -commit = "91fc5ae1140d5c9d922312431f7d251a48d7b8ce" - -[grammars.rbs] -repository = "https://github.com/joker1007/tree-sitter-rbs" -commit = "8d8e65ac3f77fbc9e15b1cdb9f980a3e0ac3ab99" diff --git a/extensions/ruby/languages/erb/config.toml b/extensions/ruby/languages/erb/config.toml deleted file mode 100644 index 5ec987e139acaa..00000000000000 --- a/extensions/ruby/languages/erb/config.toml +++ /dev/null @@ -1,9 +0,0 @@ -name = "ERB" -grammar = "embedded_template" -path_suffixes = ["erb"] -autoclose_before = ">})" -brackets = [ - { start = "<", end = ">", close = true, newline = true }, -] -block_comment = ["<%#", "%>"] -scope_opt_in_language_servers = ["tailwindcss-language-server"] diff --git a/extensions/ruby/languages/erb/highlights.scm b/extensions/ruby/languages/erb/highlights.scm deleted file mode 100644 index 0bf76a7d4916d9..00000000000000 --- a/extensions/ruby/languages/erb/highlights.scm +++ /dev/null @@ -1,12 +0,0 @@ -(comment_directive) @comment - -[ - "<%#" - "<%" - "<%=" - "<%_" - "<%-" - "%>" - "-%>" - "_%>" -] @keyword diff --git a/extensions/ruby/languages/erb/injections.scm b/extensions/ruby/languages/erb/injections.scm deleted file mode 100644 index 7a69a818ef31d7..00000000000000 --- a/extensions/ruby/languages/erb/injections.scm +++ /dev/null @@ -1,7 +0,0 @@ -((code) @content - (#set! "language" "ruby") - (#set! "combined")) - -((content) @content - (#set! "language" "html") - (#set! 
"combined")) diff --git a/extensions/ruby/languages/rbs/config.toml b/extensions/ruby/languages/rbs/config.toml deleted file mode 100644 index fbb502746bf4cc..00000000000000 --- a/extensions/ruby/languages/rbs/config.toml +++ /dev/null @@ -1,10 +0,0 @@ -name = "RBS" -grammar = "rbs" -path_suffixes = ["rbs"] -autoclose_before = "]})" -brackets = [ - { start = "(", end = ")", close = true, newline = false }, - { start = "{", end = "}", close = true, newline = false }, - { start = "[", end = "]", close = true, newline = false }, -] -line_comments = ["#"] diff --git a/extensions/ruby/languages/rbs/highlights.scm b/extensions/ruby/languages/rbs/highlights.scm deleted file mode 100644 index a5f8db14abeb67..00000000000000 --- a/extensions/ruby/languages/rbs/highlights.scm +++ /dev/null @@ -1,144 +0,0 @@ -; Taken from https://github.com/nvim-treesitter/nvim-treesitter/blob/master/queries/rbs/highlights.scm -; Use directive -(use_clause - [ - (type_name) - (simple_type_name) - ] @type) - -; Builtin constants and Keywords -[ - "true" - "false" -] @boolean - -"nil" @constant.builtin - -[ - "use" - "as" - "module" - "def" - "attr_reader" - "attr_writer" - "attr_accessor" - "end" - "alias" -] @keyword - -[ - "interface" - "type" - "class" -] @keyword.type - -(class_decl - "end" @keyword.type) - -(interface_decl - "end" @keyword.type) - -"def" @keyword.function - -; Members of declaration -[ - "include" - "extend" - "prepend" -] @function.method - -(visibility) @keyword.modifier - -(comment) @comment - -(method_member - (method_name - [ - (identifier) - (constant) - (operator) - (setter) - ] @function.method)) - -[ - (ivar_name) - (cvar_name) -] @variable.member - -(alias_member - (method_name) @function) - -(class_name - (constant) @type) - -(module_name - (constant) @type) - -(interface_name - (interface) @type) - -(alias_name - (identifier) @type) - -(type_variable) @constant - -(namespace - (constant) @module) - -(builtin_type) @type.builtin - -(const_name - (constant) @constant) - -(global_name) @variable - -; Standard Arguments -(parameter - (var_name) @variable.parameter) - -; Keyword Arguments -(keyword) @variable.parameter - -; Self -(self) @variable.builtin - -; Literal -(type - (symbol_literal) @string.special.symbol) - -(type - (string_literal - (escape_sequence) @string.escape)) - -(type - (string_literal) @string) - -(type - (integer_literal) @number) - -; Operators -[ - "=" - "->" - "<" - "**" - "*" - "&" - "|" - "^" -] @operator - -; Punctuation -[ - "(" - ")" - "[" - "]" - "{" - "}" -] @punctuation.bracket - -[ - "," - "." -] @punctuation.delimiter diff --git a/extensions/ruby/languages/rbs/indents.scm b/extensions/ruby/languages/rbs/indents.scm deleted file mode 100644 index b37b378979f1e4..00000000000000 --- a/extensions/ruby/languages/rbs/indents.scm +++ /dev/null @@ -1,14 +0,0 @@ -[ - (class_decl) - (module_decl) - (interface_decl) - (parameters) - (tuple_type) - (record_type) -] @indent.begin - -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent - -(comment) @indent.ignore diff --git a/extensions/ruby/languages/rbs/injections.scm b/extensions/ruby/languages/rbs/injections.scm deleted file mode 100644 index 2f0e58eb643151..00000000000000 --- a/extensions/ruby/languages/rbs/injections.scm +++ /dev/null @@ -1,2 +0,0 @@ -((comment) @injection.content - (#set! 
injection.language "comment")) diff --git a/extensions/ruby/languages/ruby/brackets.scm b/extensions/ruby/languages/ruby/brackets.scm deleted file mode 100644 index f5129f8f310ce4..00000000000000 --- a/extensions/ruby/languages/ruby/brackets.scm +++ /dev/null @@ -1,14 +0,0 @@ -("[" @open "]" @close) -("{" @open "}" @close) -("\"" @open "\"" @close) -("do" @open "end" @close) - -(block_parameters "|" @open "|" @close) -(interpolation "#{" @open "}" @close) - -(if "if" @open "end" @close) -(unless "unless" @open "end" @close) -(begin "begin" @open "end" @close) -(module "module" @open "end" @close) -(_ . "def" @open "end" @close) -(_ . "class" @open "end" @close) diff --git a/extensions/ruby/languages/ruby/config.toml b/extensions/ruby/languages/ruby/config.toml deleted file mode 100644 index e52ec3ddcb5d5f..00000000000000 --- a/extensions/ruby/languages/ruby/config.toml +++ /dev/null @@ -1,51 +0,0 @@ -name = "Ruby" -grammar = "ruby" -path_suffixes = [ - "rb", - "Gemfile", - "Guardfile", - "rake", - "Rakefile", - "ru", - "thor", - "cap", - "capfile", - "Capfile", - "jbuilder", - "rabl", - "rxml", - "builder", - "gemspec", - "rdoc", - "thor", - "pryrc", - "simplecov", - "Steepfile", - "Podfile", - "Brewfile", - "Vagrantfile", - "Puppetfile", -] -first_line_pattern = '^#!.*\bruby\b' -line_comments = ["# "] -autoclose_before = ";:.,=}])>" -brackets = [ - { start = "{", end = "}", close = true, newline = true }, - { start = "[", end = "]", close = true, newline = true }, - { start = "(", end = ")", close = true, newline = true }, - { start = "\"", end = "\"", close = true, newline = false, not_in = [ - "comment", - "string", - ] }, - { start = "'", end = "'", close = true, newline = false, not_in = [ - "comment", - "string", - ] }, -] -collapsed_placeholder = "# ..." -tab_size = 2 -scope_opt_in_language_servers = ["tailwindcss-language-server"] - -[overrides.string] -word_characters = ["-"] -opt_into_language_servers = ["tailwindcss-language-server"] diff --git a/extensions/ruby/languages/ruby/embedding.scm b/extensions/ruby/languages/ruby/embedding.scm deleted file mode 100644 index 7a101e6b092538..00000000000000 --- a/extensions/ruby/languages/ruby/embedding.scm +++ /dev/null @@ -1,22 +0,0 @@ -( - (comment)* @context - . - [ - (module - "module" @name - name: (_) @name) - (method - "def" @name - name: (_) @name - body: (body_statement) @collapse) - (class - "class" @name - name: (_) @name) - (singleton_method - "def" @name - object: (_) @name - "." @name - name: (_) @name - body: (body_statement) @collapse) - ] @item - ) diff --git a/extensions/ruby/languages/ruby/highlights.scm b/extensions/ruby/languages/ruby/highlights.scm deleted file mode 100644 index 17dbb4b07d5b92..00000000000000 --- a/extensions/ruby/languages/ruby/highlights.scm +++ /dev/null @@ -1,202 +0,0 @@ -; Keywords - -[ - "alias" - "and" - "begin" - "break" - "case" - "class" - "def" - "do" - "else" - "elsif" - "end" - "ensure" - "for" - "if" - "in" - "module" - "next" - "or" - "rescue" - "retry" - "return" - "then" - "unless" - "until" - "when" - "while" - "yield" -] @keyword - -((identifier) @keyword - (#match? @keyword "^(private|protected|public)$")) - -; Function calls - -((identifier) @function.method.builtin - (#eq? @function.method.builtin "require")) - -"defined?" 
@function.method.builtin - -(call - method: [(identifier) (constant)] @function.method) - -; Function definitions - -(alias (identifier) @function.method) -(setter (identifier) @function.method) -(method name: [(identifier) (constant)] @function.method) -(singleton_method name: [(identifier) (constant)] @function.method) -(method_parameters [ - (identifier) @variable.parameter - (optional_parameter name: (identifier) @variable.parameter) - (keyword_parameter [name: (identifier) (":")] @variable.parameter) - ]) - -(block_parameters (identifier) @variable.parameter) - -; Identifiers - -((identifier) @constant.builtin - (#match? @constant.builtin "^__(FILE|LINE|ENCODING)__$")) - -(file) @constant.builtin -(line) @constant.builtin -(encoding) @constant.builtin - -(hash_splat_nil - "**" @operator -) @constant.builtin - -(global_variable) @constant - -(constant) @type - -((constant) @constant - (#match? @constant "^[A-Z\\d_]+$")) - -(superclass - (constant) @type.super) - -(superclass - (scope_resolution - (constant) @type.super)) - -(superclass - (scope_resolution - (scope_resolution - (constant) @type.super))) - -(self) @variable.special -(super) @variable.special - -[ - (class_variable) - (instance_variable) -] @variable.member - - -; Literals - -[ - (string) - (bare_string) - (subshell) - (heredoc_body) - (heredoc_beginning) -] @string - -[ - (simple_symbol) - (delimited_symbol) - (hash_key_symbol) - (bare_symbol) -] @string.special.symbol - -(regex) @string.regex -(escape_sequence) @escape - -[ - (integer) - (float) -] @number - -[ - (nil) - (true) - (false) -] @constant.builtin - -(comment) @comment - -; Operators - -[ - "!" - "~" - "+" - "-" - "**" - "*" - "/" - "%" - "<<" - ">>" - "&" - "|" - "^" - ">" - "<" - "<=" - ">=" - "==" - "!=" - "=~" - "!~" - "<=>" - "||" - "&&" - ".." - "..." - "=" - "**=" - "*=" - "/=" - "%=" - "+=" - "-=" - "<<=" - ">>=" - "&&=" - "&=" - "||=" - "|=" - "^=" - "=>" - "->" - (operator) -] @operator - -[ - "," - ";" - "." - "::" -] @punctuation.delimiter - -[ - "(" - ")" - "[" - "]" - "{" - "}" - "%w(" - "%i(" -] @punctuation.bracket - -(interpolation - "#{" @punctuation.special - "}" @punctuation.special) @embedded diff --git a/extensions/ruby/languages/ruby/indents.scm b/extensions/ruby/languages/ruby/indents.scm deleted file mode 100644 index 0dab1a7c2be589..00000000000000 --- a/extensions/ruby/languages/ruby/indents.scm +++ /dev/null @@ -1,18 +0,0 @@ -(method "end" @end) @indent -(class "end" @end) @indent -(module "end" @end) @indent -(begin "end" @end) @indent -(singleton_method "end" @end) @indent -(do_block "end" @end) @indent - -(then) @indent -(call) @indent - -(ensure) @outdent -(rescue) @outdent -(else) @outdent - - -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent diff --git a/extensions/ruby/languages/ruby/injections.scm b/extensions/ruby/languages/ruby/injections.scm deleted file mode 100644 index 3ad8143b858a13..00000000000000 --- a/extensions/ruby/languages/ruby/injections.scm +++ /dev/null @@ -1,8 +0,0 @@ -(heredoc_body - (heredoc_content) @content - (heredoc_end) @language - (#downcase! @language)) - -((regex - (string_content) @content) - (#set! "language" "regex")) diff --git a/extensions/ruby/languages/ruby/outline.scm b/extensions/ruby/languages/ruby/outline.scm deleted file mode 100644 index 544257ac0c32e0..00000000000000 --- a/extensions/ruby/languages/ruby/outline.scm +++ /dev/null @@ -1,20 +0,0 @@ -(class - "class" @context - name: (_) @name) @item - -((identifier) @context - (#match? 
@context "^(private|protected|public)$")) @item - -(method - "def" @context - name: (_) @name) @item - -(singleton_method - "def" @context - object: (_) @context - "." @context - name: (_) @name) @item - -(module - "module" @context - name: (_) @name) @item diff --git a/extensions/ruby/languages/ruby/overrides.scm b/extensions/ruby/languages/ruby/overrides.scm deleted file mode 100644 index 7ff82bd3457cc6..00000000000000 --- a/extensions/ruby/languages/ruby/overrides.scm +++ /dev/null @@ -1,3 +0,0 @@ -(comment) @comment -(string) @string -[(simple_symbol) (delimited_symbol)] @simple_symbol diff --git a/extensions/ruby/languages/ruby/runnables.scm b/extensions/ruby/languages/ruby/runnables.scm deleted file mode 100644 index a3e7654a057cf1..00000000000000 --- a/extensions/ruby/languages/ruby/runnables.scm +++ /dev/null @@ -1,58 +0,0 @@ -; Adapted from the following sources: -; Minitest: https://github.com/zidhuss/neotest-minitest/blob/main/lua/neotest-minitest/init.lua -; RSpec: https://github.com/olimorris/neotest-rspec/blob/main/lua/neotest-rspec/init.lua - -; Tests that inherit from a specific class -( - (class - name: [ - (constant) @run - (scope_resolution scope: (constant) name: (constant) @run) - ] - (superclass (scope_resolution) @superclass (#match? @superclass "(::IntegrationTest|::TestCase|::SystemTestCase|Minitest::Test|TLDR)$")) - ) @_ruby-test - (#set! tag ruby-test) -) - -( - (call - method: (identifier) @run (#eq? @run "test") - arguments: (argument_list (string (string_content) @_name)) - ) @_ruby-test - (#set! tag ruby-test) -) - -; Methods that begin with test_ -( - (method - name: (identifier) @run (#match? @run "^test_") - ) @_ruby-test - (#set! tag ruby-test) -) - -; System tests that inherit from ApplicationSystemTestCase -( - (class - name: (constant) @run (superclass) @superclass (#match? @superclass "(ApplicationSystemTestCase)$") - ) @_ruby-test - (#set! tag ruby-test) -) - -; Examples -( - (call - method: (identifier) @run (#any-of? @run "describe" "context" "it" "its" "specify") - arguments: (argument_list . (_) @_name) - ) @_ruby-test - (#set! tag ruby-test) -) - -; Examples (one-liner syntax) -( - (call - method: (identifier) @run (#any-of? @run "it" "its" "specify") - block: (_) @_name - !arguments - ) @_ruby-test - (#set! 
tag ruby-test) -) diff --git a/extensions/ruby/languages/ruby/tasks.json b/extensions/ruby/languages/ruby/tasks.json deleted file mode 100644 index bba53c38f34cf6..00000000000000 --- a/extensions/ruby/languages/ruby/tasks.json +++ /dev/null @@ -1,7 +0,0 @@ -[ - { - "label": "test $ZED_RELATIVE_FILE:$ZED_ROW", - "command": "echo 'To run tests, configure tasks in your \".zed/tasks.json\" file as described in the Ruby extension documentation.'", - "tags": ["ruby-test"] - } -] diff --git a/extensions/ruby/src/language_servers.rs b/extensions/ruby/src/language_servers.rs deleted file mode 100644 index 3ad3de55ea898d..00000000000000 --- a/extensions/ruby/src/language_servers.rs +++ /dev/null @@ -1,7 +0,0 @@ -mod rubocop; -mod ruby_lsp; -mod solargraph; - -pub use rubocop::*; -pub use ruby_lsp::*; -pub use solargraph::*; diff --git a/extensions/ruby/src/language_servers/rubocop.rs b/extensions/ruby/src/language_servers/rubocop.rs deleted file mode 100644 index d8e342bd510199..00000000000000 --- a/extensions/ruby/src/language_servers/rubocop.rs +++ /dev/null @@ -1,59 +0,0 @@ -use zed_extension_api::{self as zed, settings::LspSettings, LanguageServerId, Result}; - -pub struct RubocopBinary { - pub path: String, - pub args: Option>, -} - -pub struct Rubocop {} - -impl Rubocop { - pub const LANGUAGE_SERVER_ID: &'static str = "rubocop"; - - pub fn new() -> Self { - Self {} - } - - pub fn language_server_command( - &mut self, - language_server_id: &LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - let binary = self.language_server_binary(language_server_id, worktree)?; - - Ok(zed::Command { - command: binary.path, - args: binary.args.unwrap_or_else(|| vec!["--lsp".to_string()]), - env: worktree.shell_env(), - }) - } - - fn language_server_binary( - &self, - _language_server_id: &LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - let binary_settings = LspSettings::for_worktree("rubocop", worktree) - .ok() - .and_then(|lsp_settings| lsp_settings.binary); - let binary_args = binary_settings - .as_ref() - .and_then(|binary_settings| binary_settings.arguments.clone()); - - if let Some(path) = binary_settings.and_then(|binary_settings| binary_settings.path) { - return Ok(RubocopBinary { - path, - args: binary_args, - }); - } - - if let Some(path) = worktree.which("rubocop") { - return Ok(RubocopBinary { - path, - args: binary_args, - }); - } - - Err("rubocop must be installed manually. 
Install it with `gem install rubocop` or specify the 'binary' path to it via local settings.".to_string()) - } -} diff --git a/extensions/ruby/src/language_servers/ruby_lsp.rs b/extensions/ruby/src/language_servers/ruby_lsp.rs deleted file mode 100644 index 0f3bb2265e26f6..00000000000000 --- a/extensions/ruby/src/language_servers/ruby_lsp.rs +++ /dev/null @@ -1,128 +0,0 @@ -use zed_extension_api::{ - self as zed, - lsp::{Completion, CompletionKind, Symbol, SymbolKind}, - settings::LspSettings, - CodeLabel, CodeLabelSpan, LanguageServerId, Result, -}; - -pub struct RubyLspBinary { - pub path: String, - pub args: Option>, -} - -pub struct RubyLsp {} - -impl RubyLsp { - pub const LANGUAGE_SERVER_ID: &'static str = "ruby-lsp"; - - pub fn new() -> Self { - Self {} - } - - pub fn language_server_command( - &mut self, - language_server_id: &LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - let binary = self.language_server_binary(language_server_id, worktree)?; - - Ok(zed::Command { - command: binary.path, - args: binary.args.unwrap_or_default(), - env: worktree.shell_env(), - }) - } - - fn language_server_binary( - &self, - _language_server_id: &LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - let binary_settings = LspSettings::for_worktree("ruby-lsp", worktree) - .ok() - .and_then(|lsp_settings| lsp_settings.binary); - let binary_args = binary_settings - .as_ref() - .and_then(|binary_settings| binary_settings.arguments.clone()); - - if let Some(path) = binary_settings.and_then(|binary_settings| binary_settings.path) { - return Ok(RubyLspBinary { - path, - args: binary_args, - }); - } - - if let Some(path) = worktree.which("ruby-lsp") { - return Ok(RubyLspBinary { - path, - args: binary_args, - }); - } - - Err( - "ruby-lsp must be installed manually. Install it with `gem install ruby-lsp`." - .to_string(), - ) - } - - pub fn label_for_completion(&self, completion: Completion) -> Option { - let highlight_name = match completion.kind? 
{ - CompletionKind::Class | CompletionKind::Module => "type", - CompletionKind::Constant => "constant", - CompletionKind::Method => "function.method", - CompletionKind::Reference => "function.method", - CompletionKind::Keyword => "keyword", - _ => return None, - }; - - let len = completion.label.len(); - let name_span = CodeLabelSpan::literal(completion.label, Some(highlight_name.to_string())); - - Some(CodeLabel { - code: Default::default(), - spans: vec![name_span], - filter_range: (0..len).into(), - }) - } - - pub fn label_for_symbol(&self, symbol: Symbol) -> Option { - let name = &symbol.name; - - match symbol.kind { - SymbolKind::Method => { - let code = format!("def {name}; end"); - let filter_range = 0..name.len(); - let display_range = 4..4 + name.len(); - - Some(CodeLabel { - code, - spans: vec![CodeLabelSpan::code_range(display_range)], - filter_range: filter_range.into(), - }) - } - SymbolKind::Class | SymbolKind::Module => { - let code = format!("class {name}; end"); - let filter_range = 0..name.len(); - let display_range = 6..6 + name.len(); - - Some(CodeLabel { - code, - spans: vec![CodeLabelSpan::code_range(display_range)], - filter_range: filter_range.into(), - }) - } - SymbolKind::Constant => { - let code = name.to_uppercase().to_string(); - let filter_range = 0..name.len(); - let display_range = 0..name.len(); - - Some(CodeLabel { - code, - spans: vec![CodeLabelSpan::code_range(display_range)], - filter_range: filter_range.into(), - }) - } - _ => None, - } - } -} diff --git a/extensions/ruby/src/language_servers/solargraph.rs b/extensions/ruby/src/language_servers/solargraph.rs deleted file mode 100644 index af736d610466b2..00000000000000 --- a/extensions/ruby/src/language_servers/solargraph.rs +++ /dev/null @@ -1,156 +0,0 @@ -use zed::lsp::{Completion, CompletionKind, Symbol, SymbolKind}; -use zed::{CodeLabel, CodeLabelSpan}; -use zed_extension_api::settings::LspSettings; -use zed_extension_api::{self as zed, LanguageServerId, Result}; - -pub struct SolargraphBinary { - pub path: String, - pub args: Option>, -} - -pub struct Solargraph {} - -impl Solargraph { - pub const LANGUAGE_SERVER_ID: &'static str = "solargraph"; - - pub fn new() -> Self { - Self {} - } - - pub fn language_server_command( - &mut self, - language_server_id: &LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - let binary = self.language_server_binary(language_server_id, worktree)?; - - Ok(zed::Command { - command: binary.path, - args: binary.args.unwrap_or_else(|| vec!["stdio".to_string()]), - env: worktree.shell_env(), - }) - } - - fn language_server_binary( - &self, - _language_server_id: &LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - let binary_settings = LspSettings::for_worktree("solargraph", worktree) - .ok() - .and_then(|lsp_settings| lsp_settings.binary); - let binary_args = binary_settings - .as_ref() - .and_then(|binary_settings| binary_settings.arguments.clone()); - - if let Some(path) = binary_settings.and_then(|binary_settings| binary_settings.path) { - return Ok(SolargraphBinary { - path, - args: binary_args, - }); - } - - if let Some(path) = worktree.which("solargraph") { - return Ok(SolargraphBinary { - path, - args: binary_args, - }); - } - - Err("solargraph must be installed manually".to_string()) - } - - pub fn label_for_completion(&self, completion: Completion) -> Option { - let highlight_name = match completion.kind? 
{ - CompletionKind::Class | CompletionKind::Module => "type", - CompletionKind::Constant => "constant", - CompletionKind::Method => "function.method", - CompletionKind::Keyword => { - if completion.label.starts_with(':') { - "string.special.symbol" - } else { - "keyword" - } - } - CompletionKind::Variable => { - if completion.label.starts_with('@') { - "property" - } else { - return None; - } - } - _ => return None, - }; - - let len = completion.label.len(); - let name_span = CodeLabelSpan::literal(completion.label, Some(highlight_name.to_string())); - - Some(CodeLabel { - code: Default::default(), - spans: if let Some(detail) = completion.detail { - vec![ - name_span, - CodeLabelSpan::literal(" ", None), - CodeLabelSpan::literal(detail, None), - ] - } else { - vec![name_span] - }, - filter_range: (0..len).into(), - }) - } - - pub fn label_for_symbol(&self, symbol: Symbol) -> Option { - let name = &symbol.name; - - return match symbol.kind { - SymbolKind::Method => { - let mut parts = name.split('#'); - let container_name = parts.next()?; - let method_name = parts.next()?; - - if parts.next().is_some() { - return None; - } - - let filter_range = 0..name.len(); - - let spans = vec![ - CodeLabelSpan::literal(container_name, Some("type".to_string())), - CodeLabelSpan::literal("#", None), - CodeLabelSpan::literal(method_name, Some("function.method".to_string())), - ]; - - Some(CodeLabel { - code: name.to_string(), - spans, - filter_range: filter_range.into(), - }) - } - SymbolKind::Class | SymbolKind::Module => { - let class = "class "; - let code = format!("{class}{name}"); - let filter_range = 0..name.len(); - let display_range = class.len()..class.len() + name.len(); - - Some(CodeLabel { - code, - spans: vec![CodeLabelSpan::code_range(display_range)], - filter_range: filter_range.into(), - }) - } - SymbolKind::Constant => { - let code = name.to_uppercase().to_string(); - let filter_range = 0..name.len(); - let display_range = 0..name.len(); - - Some(CodeLabel { - code, - spans: vec![CodeLabelSpan::code_range(display_range)], - filter_range: filter_range.into(), - }) - } - _ => None, - }; - } -} diff --git a/extensions/ruby/src/ruby.rs b/extensions/ruby/src/ruby.rs deleted file mode 100644 index 1c8476e40f9993..00000000000000 --- a/extensions/ruby/src/ruby.rs +++ /dev/null @@ -1,88 +0,0 @@ -mod language_servers; - -use zed::lsp::{Completion, Symbol}; -use zed::settings::LspSettings; -use zed::{serde_json, CodeLabel, LanguageServerId}; -use zed_extension_api::{self as zed, Result}; - -use crate::language_servers::{Rubocop, RubyLsp, Solargraph}; - -struct RubyExtension { - solargraph: Option, - ruby_lsp: Option, - rubocop: Option, -} - -impl zed::Extension for RubyExtension { - fn new() -> Self { - Self { - solargraph: None, - ruby_lsp: None, - rubocop: None, - } - } - - fn language_server_command( - &mut self, - language_server_id: &LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - match language_server_id.as_ref() { - Solargraph::LANGUAGE_SERVER_ID => { - let solargraph = self.solargraph.get_or_insert_with(Solargraph::new); - solargraph.language_server_command(language_server_id, worktree) - } - RubyLsp::LANGUAGE_SERVER_ID => { - let ruby_lsp = self.ruby_lsp.get_or_insert_with(RubyLsp::new); - ruby_lsp.language_server_command(language_server_id, worktree) - } - Rubocop::LANGUAGE_SERVER_ID => { - let rubocop = self.rubocop.get_or_insert_with(Rubocop::new); - rubocop.language_server_command(language_server_id, worktree) - } - language_server_id => Err(format!("unknown language 
server: {language_server_id}")), - } - } - - fn label_for_symbol( - &self, - language_server_id: &LanguageServerId, - symbol: Symbol, - ) -> Option { - match language_server_id.as_ref() { - Solargraph::LANGUAGE_SERVER_ID => self.solargraph.as_ref()?.label_for_symbol(symbol), - RubyLsp::LANGUAGE_SERVER_ID => self.ruby_lsp.as_ref()?.label_for_symbol(symbol), - _ => None, - } - } - - fn label_for_completion( - &self, - language_server_id: &LanguageServerId, - completion: Completion, - ) -> Option { - match language_server_id.as_ref() { - Solargraph::LANGUAGE_SERVER_ID => { - self.solargraph.as_ref()?.label_for_completion(completion) - } - RubyLsp::LANGUAGE_SERVER_ID => self.ruby_lsp.as_ref()?.label_for_completion(completion), - _ => None, - } - } - - fn language_server_initialization_options( - &mut self, - language_server_id: &LanguageServerId, - worktree: &zed::Worktree, - ) -> Result> { - let initialization_options = - LspSettings::for_worktree(language_server_id.as_ref(), worktree) - .ok() - .and_then(|lsp_settings| lsp_settings.initialization_options.clone()) - .unwrap_or_default(); - - Ok(Some(serde_json::json!(initialization_options))) - } -} - -zed::register_extension!(RubyExtension); diff --git a/extensions/ruff/Cargo.toml b/extensions/ruff/Cargo.toml index 50e0ae3908e0e6..b6c31ebbc86100 100644 --- a/extensions/ruff/Cargo.toml +++ b/extensions/ruff/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_ruff" -version = "0.0.2" +version = "0.1.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/ruff/extension.toml b/extensions/ruff/extension.toml index d622b37c685e05..63929fc1911971 100644 --- a/extensions/ruff/extension.toml +++ b/extensions/ruff/extension.toml @@ -1,7 +1,7 @@ id = "ruff" name = "Ruff" description = "Support for Ruff, the Python linter and formatter" -version = "0.0.2" +version = "0.1.0" schema_version = 1 authors = [] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/ruff/src/ruff.rs b/extensions/ruff/src/ruff.rs index c7c51ec7b9dd68..048c5893ca773d 100644 --- a/extensions/ruff/src/ruff.rs +++ b/extensions/ruff/src/ruff.rs @@ -89,7 +89,10 @@ impl RuffExtension { .ok_or_else(|| format!("no asset found matching {:?}", asset_name))?; let version_dir = format!("ruff-{}", release.version); - let binary_path = format!("{version_dir}/{asset_stem}/ruff"); + let binary_path = match platform { + zed::Os::Windows => format!("{version_dir}/ruff.exe"), + _ => format!("{version_dir}/{asset_stem}/ruff"), + }; if !fs::metadata(&binary_path).map_or(false, |stat| stat.is_file()) { zed::set_language_server_installation_status( diff --git a/extensions/scheme/languages/scheme/highlights.scm b/extensions/scheme/languages/scheme/highlights.scm index 40ba61cd055948..76a4ba7a52a5c4 100644 --- a/extensions/scheme/languages/scheme/highlights.scm +++ b/extensions/scheme/languages/scheme/highlights.scm @@ -7,7 +7,7 @@ (symbol) @variable (string) @string -(escape_sequence) @escape +(escape_sequence) @string.escape [(comment) (block_comment) diff --git a/extensions/svelte/.gitignore b/extensions/svelte/.gitignore deleted file mode 100644 index 6aba30215ee94f..00000000000000 --- a/extensions/svelte/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -target -*.wasm -grammars diff --git a/extensions/svelte/Cargo.toml b/extensions/svelte/Cargo.toml deleted file mode 100644 index d07d517af66a77..00000000000000 --- a/extensions/svelte/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "zed_svelte" -version = "0.1.1" -edition = "2021" -publish = false 
-license = "Apache-2.0"
-
-[lints]
-workspace = true
-
-[lib]
-path = "src/svelte.rs"
-crate-type = ["cdylib"]
-
-[dependencies]
-zed_extension_api = "0.1.0"
diff --git a/extensions/svelte/LICENSE-APACHE b/extensions/svelte/LICENSE-APACHE
deleted file mode 120000
index 1cd601d0a3affa..00000000000000
--- a/extensions/svelte/LICENSE-APACHE
+++ /dev/null
@@ -1 +0,0 @@
-../../LICENSE-APACHE
\ No newline at end of file
diff --git a/extensions/svelte/extension.toml b/extensions/svelte/extension.toml
deleted file mode 100644
index 9ca1d6c5daafdb..00000000000000
--- a/extensions/svelte/extension.toml
+++ /dev/null
@@ -1,15 +0,0 @@
-id = "svelte"
-name = "Svelte"
-description = "Svelte support"
-version = "0.1.1"
-schema_version = 1
-authors = []
-repository = "https://github.com/zed-extensions/svelte"
-
-[language_servers.svelte-language-server]
-name = "Svelte Language Server"
-language = "Svelte"
-
-[grammars.svelte]
-repository = "https://github.com/Himujjal/tree-sitter-svelte"
-commit = "b08d070e303d2a385d6d0ab3add500f8fa514443"
diff --git a/extensions/svelte/languages/svelte/config.toml b/extensions/svelte/languages/svelte/config.toml
deleted file mode 100644
index 3bab2f29430813..00000000000000
--- a/extensions/svelte/languages/svelte/config.toml
+++ /dev/null
@@ -1,22 +0,0 @@
-name = "Svelte"
-grammar = "svelte"
-path_suffixes = ["svelte"]
-block_comment = ["<!-- ", " -->"]
-autoclose_before = ";:.,=}])>"
-brackets = [
-  { start = "{", end = "}", close = true, newline = true },
-  { start = "[", end = "]", close = true, newline = true },
-  { start = "(", end = ")", close = true, newline = true },
-  { start = "<", end = ">", close = false, newline = true, not_in = ["string", "comment"] },
-  { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] },
-  { start = "'", end = "'", close = true, newline = false, not_in = ["string", "comment"] },
-  { start = "`", end = "`", close = true, newline = false, not_in = ["string"] },
-  { start = "/*", end = " */", close = true, newline = false, not_in = ["string", "comment"] },
-]
-scope_opt_in_language_servers = ["tailwindcss-language-server"]
-prettier_parser_name = "svelte"
-prettier_plugins = ["prettier-plugin-svelte"]
-
-[overrides.string]
-word_characters = ["-"]
-opt_into_language_servers = ["tailwindcss-language-server"]
diff --git a/extensions/svelte/languages/svelte/highlights.scm b/extensions/svelte/languages/svelte/highlights.scm
deleted file mode 100755
index 4e317489d832fa..00000000000000
--- a/extensions/svelte/languages/svelte/highlights.scm
+++ /dev/null
@@ -1,50 +0,0 @@
-; Special identifiers
-;--------------------
-
-; Treat capitalized tag names as constructors and types
-((tag_name) @type
- (#match? @type "^[A-Z]"))
-
-; Regular (lowercase) tag names
-((tag_name) @tag
- (#match? @tag "^[a-z]"))
-
-; TODO:
-(attribute_name) @property
-(erroneous_end_tag_name) @keyword
-(comment) @comment
-
-[
-  (attribute_value)
-  (quoted_attribute_value)
-] @string
-
-[
-  (text)
-  (raw_text_expr)
-  (raw_text_each)
-] @none
-
-[
-  (special_block_keyword)
-  (then)
-  (as)
-] @keyword
-
-[
-  "{"
-  "}"
-] @punctuation.bracket
-
-"=" @operator
-
-[
-  "<"
-  ">"
-  "</"
-  "/>"
-  "#"
-  ":"
-  "/"
-  "@"
-] @tag.delimiter
diff --git a/extensions/svelte/languages/svelte/indents.scm b/extensions/svelte/languages/svelte/indents.scm
deleted file mode 100755
index 89082e1a505387..00000000000000
--- a/extensions/svelte/languages/svelte/indents.scm
+++ /dev/null
@@ -1,9 +0,0 @@
-[
-  (element)
-  (if_statement)
-  (each_statement)
-  (await_statement)
-  (snippet_statement)
-  (script_element)
-  (style_element)
-] @indent
diff --git a/extensions/svelte/languages/svelte/injections.scm b/extensions/svelte/languages/svelte/injections.scm
deleted file mode 100755
index 24f9425803e2d4..00000000000000
--- a/extensions/svelte/languages/svelte/injections.scm
+++ /dev/null
@@ -1,74 +0,0 @@
-; injections.scm
-; --------------
-
-; match script tags without a lang tag
-((script_element
-  (start_tag
-    (attribute
-      (attribute_name) @_name)*)
-  (raw_text) @content)
-  (#not-eq? @_name "lang")
-  (#set! "language" "javascript"))
-
-; match javascript
-((script_element
-  (start_tag
-    (attribute
-      (attribute_name) @_name
-      (quoted_attribute_value (attribute_value) @_value)))
-  (raw_text) @content)
-  (#eq? @_name "lang")
-  (#eq? @_value "js")
-  (#set! "language" "javascript"))
-
-; match typescript
-((script_element
-  (start_tag
-    (attribute
-      (attribute_name) @_name
-      (quoted_attribute_value (attribute_value) @_value)))
-  (raw_text) @content)
-  (#eq? @_name "lang")
-  (#eq? @_value "ts")
-  (#set! "language" "typescript"))
-
-(style_element
-  (raw_text) @content
-  (#set! "language" "css"))
-
-; match style tags without a lang tag
-((style_element
-  (start_tag
-    (attribute
-      (attribute_name) @_name)*)
-  (raw_text) @content)
-  (#not-eq? @_name "lang")
-  (#set! "language" "css"))
-
-; match css
-((style_element
-  (start_tag
-    (attribute
-      (attribute_name) @_name
-      (quoted_attribute_value (attribute_value) @_value)))
-  (raw_text) @content)
-  (#eq? @_name "lang")
-  (#eq? @_value "css")
-  (#set! "language" "css"))
-
-; match scss
-((style_element
-  (start_tag
-    (attribute
-      (attribute_name) @_name
-      (quoted_attribute_value (attribute_value) @_value)))
-  (raw_text) @content)
-  (#eq? @_name "lang")
-  (#eq? @_value "scss")
-  (#set! "language" "scss"))
-
-((raw_text_expr) @content
-  (#set! "language" "javascript"))
-
-((raw_text_each) @content
-  (#set! "language" "javascript"))
diff --git a/extensions/svelte/languages/svelte/overrides.scm b/extensions/svelte/languages/svelte/overrides.scm
deleted file mode 100644
index 2a76410297833c..00000000000000
--- a/extensions/svelte/languages/svelte/overrides.scm
+++ /dev/null
@@ -1,7 +0,0 @@
-(comment) @comment
-
-[
-  (raw_text)
-  (attribute_value)
-  (quoted_attribute_value)
-] @string
diff --git a/extensions/svelte/src/svelte.rs b/extensions/svelte/src/svelte.rs
deleted file mode 100644
index 378e763e04ea56..00000000000000
--- a/extensions/svelte/src/svelte.rs
+++ /dev/null
@@ -1,124 +0,0 @@
-use std::{env, fs};
-use zed_extension_api::{self as zed, serde_json, Result};
-
-struct SvelteExtension {
-    did_find_server: bool,
-}
-
-const SERVER_PATH: &str = "node_modules/svelte-language-server/bin/server.js";
-const PACKAGE_NAME: &str = "svelte-language-server";
-
-impl SvelteExtension {
-    fn server_exists(&self) -> bool {
-        fs::metadata(SERVER_PATH).map_or(false, |stat| stat.is_file())
-    }
-
-    fn server_script_path(&mut self, id: &zed::LanguageServerId) -> Result<String> {
-        let server_exists = self.server_exists();
-        if self.did_find_server && server_exists {
-            return Ok(SERVER_PATH.to_string());
-        }
-
-        zed::set_language_server_installation_status(
-            id,
-            &zed::LanguageServerInstallationStatus::CheckingForUpdate,
-        );
-        let version = zed::npm_package_latest_version(PACKAGE_NAME)?;
-
-        if !server_exists
-            || zed::npm_package_installed_version(PACKAGE_NAME)?.as_ref() != Some(&version)
-        {
-            zed::set_language_server_installation_status(
-                id,
-                &zed::LanguageServerInstallationStatus::Downloading,
-            );
-            let result = zed::npm_install_package(PACKAGE_NAME, &version);
-            match result {
-                Ok(()) => {
-                    if !self.server_exists() {
-                        Err(format!(
-                            "installed package '{PACKAGE_NAME}' did not contain expected path '{SERVER_PATH}'",
-                        ))?;
-                    }
-                }
-                Err(error) => {
-                    if !self.server_exists() {
-                        Err(error)?;
-                    }
-                }
-            }
-        }
-
-        self.did_find_server = true;
-        Ok(SERVER_PATH.to_string())
-    }
-}
-
-impl zed::Extension for SvelteExtension {
-    fn new() -> Self {
-        Self {
-            did_find_server: false,
-        }
-    }
-
-    fn language_server_command(
-        &mut self,
-        id: &zed::LanguageServerId,
-        _: &zed::Worktree,
-    ) -> Result<zed::Command> {
-        let server_path = self.server_script_path(id)?;
-        Ok(zed::Command {
-            command: zed::node_binary_path()?,
-            args: vec![
-                env::current_dir()
-                    .unwrap()
-                    .join(&server_path)
-                    .to_string_lossy()
-                    .to_string(),
-                "--stdio".to_string(),
-            ],
-            env: Default::default(),
-        })
-    }
-
-    fn language_server_initialization_options(
-        &mut self,
-        _: &zed::LanguageServerId,
-        _: &zed::Worktree,
-    ) -> Result<Option<serde_json::Value>> {
-        let config = serde_json::json!({
-            "inlayHints": {
-                "parameterNames": {
-                    "enabled": "all",
-                    "suppressWhenArgumentMatchesName": false
-                },
-                "parameterTypes": {
-                    "enabled": true
-                },
-                "variableTypes": {
-                    "enabled": true,
-                    "suppressWhenTypeMatchesName": false
-                },
-                "propertyDeclarationTypes": {
-                    "enabled": true
-                },
-                "functionLikeReturnTypes": {
-                    "enabled": true
-                },
-                "enumMemberValues": {
-                    "enabled": true
-                }
-            }
-        });
-
-        Ok(Some(serde_json::json!({
-            "provideFormatter": true,
-            "configuration": {
-                "typescript": config,
-                "javascript": config
-            }
-        })))
-    }
-}
-
-zed::register_extension!(SvelteExtension);
diff --git a/extensions/terraform/Cargo.toml b/extensions/terraform/Cargo.toml
index 7892b68466cc6e..56ae621e167efc 100644
--- a/extensions/terraform/Cargo.toml
+++ b/extensions/terraform/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "zed_terraform"
-version = "0.1.0"
+version = "0.1.1"
 edition = "2021"
 publish = false
 license = "Apache-2.0"
diff --git a/extensions/terraform/extension.toml b/extensions/terraform/extension.toml
index 80fe03fc04431e..fc96f773e9b238 100644
--- a/extensions/terraform/extension.toml
+++ b/extensions/terraform/extension.toml
@@ -1,7 +1,7 @@
 id = "terraform"
 name = "Terraform"
 description = "Terraform support."
-version = "0.1.0"
+version = "0.1.1"
 schema_version = 1
 authors = ["Caius Durling ", "Daniel Banck "]
 repository = "https://github.com/zed-industries/zed"
diff --git a/extensions/terraform/languages/hcl/config.toml b/extensions/terraform/languages/hcl/config.toml
index 891b2f38d4182c..be7e601e014482 100644
--- a/extensions/terraform/languages/hcl/config.toml
+++ b/extensions/terraform/languages/hcl/config.toml
@@ -12,3 +12,4 @@ brackets = [
   { start = "'", end = "'", close = true, newline = false, not_in = ["comment", "string"] },
   { start = "/*", end = " */", close = true, newline = false, not_in = ["comment", "string"] },
 ]
+tab_size = 2
diff --git a/extensions/terraform/languages/hcl/indents.scm b/extensions/terraform/languages/hcl/indents.scm
index 74edb66bdf28d8..be12af1d0cd686 100644
--- a/extensions/terraform/languages/hcl/indents.scm
+++ b/extensions/terraform/languages/hcl/indents.scm
@@ -6,6 +6,8 @@
   (function_call)
 ] @indent

-(_ "[" "]" @end) @indent
-(_ "(" ")" @end) @indent
-(_ "{" "}" @end) @indent
+[
+  "]"
+  "}"
+  ")"
+] @outdent
diff --git a/extensions/terraform/languages/terraform-vars/config.toml b/extensions/terraform/languages/terraform-vars/config.toml
index 12ed7e236c4fea..4d803ee36e934d 100644
--- a/extensions/terraform/languages/terraform-vars/config.toml
+++ b/extensions/terraform/languages/terraform-vars/config.toml
@@ -12,3 +12,4 @@ brackets = [
   { start = "'", end = "'", close = true, newline = false, not_in = ["comment", "string"] },
   { start = "/*", end = " */", close = true, newline = false, not_in = ["comment", "string"] },
 ]
+tab_size = 2
diff --git a/extensions/terraform/languages/terraform-vars/indents.scm b/extensions/terraform/languages/terraform-vars/indents.scm
index 95ad93df1da98b..b9ba0ad56a3ed0 100644
--- a/extensions/terraform/languages/terraform-vars/indents.scm
+++ b/extensions/terraform/languages/terraform-vars/indents.scm
@@ -6,9 +6,11 @@
   (function_call)
 ] @indent

-(_ "[" "]" @end) @indent
-(_ "(" ")" @end) @indent
-(_ "{" "}" @end) @indent
+[
+  "]"
+  "}"
+  ")"
+] @outdent

 ; https://github.com/nvim-treesitter/nvim-treesitter/blob/ce4adf11cfe36fc5b0e5bcdce0c7c6e8fbc9798a/queries/terraform/indents.scm
 ; inherits: hcl
diff --git a/extensions/terraform/languages/terraform/indents.scm b/extensions/terraform/languages/terraform/indents.scm
index 95ad93df1da98b..b9ba0ad56a3ed0 100644
--- a/extensions/terraform/languages/terraform/indents.scm
+++ b/extensions/terraform/languages/terraform/indents.scm
@@ -6,9 +6,11 @@
   (function_call)
 ] @indent

-(_ "[" "]" @end) @indent
-(_ "(" ")" @end) @indent
-(_ "{" "}" @end) @indent
+[
+  "]"
+  "}"
+  ")"
+] @outdent

 ; https://github.com/nvim-treesitter/nvim-treesitter/blob/ce4adf11cfe36fc5b0e5bcdce0c7c6e8fbc9798a/queries/terraform/indents.scm
 ; inherits: hcl
diff --git a/extensions/test-extension/Cargo.toml b/extensions/test-extension/Cargo.toml
index 094302e89f47a1..5e17a9a6a3c549 100644
--- a/extensions/test-extension/Cargo.toml
+++ b/extensions/test-extension/Cargo.toml
@@ -13,4 +13,4 @@ path = "src/test_extension.rs"
 crate-type = ["cdylib"]

 [dependencies]
-zed_extension_api = "0.1.0"
+zed_extension_api = { path = "../../crates/extension_api" }
diff --git a/extensions/toml/languages/toml/highlights.scm b/extensions/toml/languages/toml/highlights.scm
index 04d83b545925d7..4be265cce74b3d 100644
--- a/extensions/toml/languages/toml/highlights.scm
+++ b/extensions/toml/languages/toml/highlights.scm
@@ -9,9 +9,10 @@
 (boolean) @constant
 (comment) @comment

-(string) @string
 (integer) @number
 (float) @number
+(string) @string
+(escape_sequence) @string.escape
 (offset_date_time) @string.special
 (local_date_time) @string.special
 (local_date) @string.special
diff --git a/extensions/vue/Cargo.toml b/extensions/vue/Cargo.toml
deleted file mode 100644
index 31d6603ec3193e..00000000000000
--- a/extensions/vue/Cargo.toml
+++ /dev/null
@@ -1,17 +0,0 @@
-[package]
-name = "zed_vue"
-version = "0.1.0"
-edition = "2021"
-publish = false
-license = "Apache-2.0"
-
-[lints]
-workspace = true
-
-[lib]
-path = "src/vue.rs"
-crate-type = ["cdylib"]
-
-[dependencies]
-serde = { version = "1.0", features = ["derive"] }
-zed_extension_api = "0.1.0"
diff --git a/extensions/vue/LICENSE-APACHE b/extensions/vue/LICENSE-APACHE
deleted file mode 120000
index 1cd601d0a3affa..00000000000000
--- a/extensions/vue/LICENSE-APACHE
+++ /dev/null
@@ -1 +0,0 @@
-../../LICENSE-APACHE
\ No newline at end of file
diff --git a/extensions/vue/extension.toml b/extensions/vue/extension.toml
deleted file mode 100644
index 2050d034689d35..00000000000000
--- a/extensions/vue/extension.toml
+++ /dev/null
@@ -1,19 +0,0 @@
-id = "vue"
-name = "Vue"
-description = "Vue support."
-version = "0.1.0"
-schema_version = 1
-authors = ["Piotr Osiewicz "]
-repository = "https://github.com/zed-industries/zed"
-
-[language_servers.vue-language-server]
-name = "Vue Language Server"
-language = "Vue.js"
-language_ids = { "Vue.js" = "vue" }
-# REFACTOR is explicitly disabled, as vue-lsp does not adhere to LSP protocol for code actions with these - it
-# sends back a CodeAction with neither `command` nor `edits` fields set, which is against the spec.
-code_action_kinds = ["", "quickfix", "refactor.rewrite"]
-
-[grammars.vue]
-repository = "https://github.com/tree-sitter-grammars/tree-sitter-vue"
-commit = "7e48557b903a9db9c38cea3b7839ef7e1f36c693"
diff --git a/extensions/vue/languages/vue/brackets.scm b/extensions/vue/languages/vue/brackets.scm
deleted file mode 100644
index 2d12b17daab82f..00000000000000
--- a/extensions/vue/languages/vue/brackets.scm
+++ /dev/null
@@ -1,2 +0,0 @@
-("<" @open ">" @close)
-("\"" @open "\"" @close)
diff --git a/extensions/vue/languages/vue/config.toml b/extensions/vue/languages/vue/config.toml
deleted file mode 100644
index 606c733ffc1f5d..00000000000000
--- a/extensions/vue/languages/vue/config.toml
+++ /dev/null
@@ -1,22 +0,0 @@
-name = "Vue.js"
-code_fence_block_name = "vue"
-grammar = "vue"
-path_suffixes = ["vue"]
-block_comment = ["<!-- ", " -->"]
-autoclose_before = ";:.,=}])>"
-brackets = [
-  { start = "{", end = "}", close = true, newline = true },
-  { start = "[", end = "]", close = true, newline = true },
-  { start = "(", end = ")", close = true, newline = true },
-  { start = "<", end = ">", close = true, newline = true, not_in = ["string", "comment"] },
-  { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] },
-  { start = "'", end = "'", close = true, newline = false, not_in = ["string", "comment"] },
-  { start = "`", end = "`", close = true, newline = false, not_in = ["string"] },
-]
-word_characters = ["-"]
-scope_opt_in_language_servers = ["tailwindcss-language-server"]
-prettier_parser_name = "vue"
-
-[overrides.string]
-word_characters = ["-"]
-opt_into_language_servers = ["tailwindcss-language-server"]
diff --git a/extensions/vue/languages/vue/highlights.scm b/extensions/vue/languages/vue/highlights.scm
deleted file mode 100644
index 548f57d26762a3..00000000000000
--- a/extensions/vue/languages/vue/highlights.scm
+++ /dev/null
@@ -1,15 +0,0 @@
-(attribute) @property
-(directive_attribute) @property
-(quoted_attribute_value) @string
-(interpolation) @punctuation.special
-(raw_text) @embedded
-
-((tag_name) @type
- (#match? @type "^[A-Z]"))
-
-(directive_name) @keyword
-(directive_argument) @constant
-
-(start_tag) @tag
-(end_tag) @tag
-(self_closing_tag) @tag
diff --git a/extensions/vue/languages/vue/injections.scm b/extensions/vue/languages/vue/injections.scm
deleted file mode 100644
index 0cb0ec1804037b..00000000000000
--- a/extensions/vue/languages/vue/injections.scm
+++ /dev/null
@@ -1,60 +0,0 @@
-;